/// <summary>
/// Chunked download: fetches one byte range [startPos, endPos] of an OSS object
/// and writes it at the matching offset of the shared output stream.
/// </summary>
/// <param name="bufferedStream">Destination stream positioned per-range via Seek.</param>
/// <param name="startPos">First byte of the range to fetch.</param>
/// <param name="endPos">Last byte of the range to fetch.</param>
/// <param name="localFilePath">Local target path (not used inside this method).</param>
/// <param name="bucketName">Source bucket.</param>
/// <param name="fileKey">Source object key.</param>
private static void Download(BufferedStream bufferedStream, long startPos, long endPos, String localFilePath, String bucketName, String fileKey)
{
    var rangeRequest = new GetObjectRequest(bucketName, fileKey);
    rangeRequest.SetRange(startPos, endPos);
    var ossObject = client.GetObject(rangeRequest);

    // Position the output at this range's offset before copying.
    bufferedStream.Seek(startPos, SeekOrigin.Begin);

    // Dispose the content stream even if the copy loop throws.
    using (var content = ossObject.Content)
    {
        var chunk = new byte[1024 * 1024];
        int read;
        while ((read = content.Read(chunk, 0, chunk.Length)) > 0)
        {
            bufferedStream.Write(chunk, 0, read);
        }
    }
}
/// <summary>
/// Fetches the named object from the configured S3 bucket and returns its
/// contents as a rewound in-memory stream.
/// </summary>
/// <param name="name">Object key to fetch.</param>
/// <returns>A MemoryStream positioned at 0, or null when the key does not exist.</returns>
public Stream Download( string name )
{
    using ( AmazonS3 client = AWSClientFactory.CreateAmazonS3Client( this.amazonKey, this.amazonSecret ) )
    {
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = this.amazonBucket,
            Key = name
        };

        try
        {
            // FIX: dispose the response so its underlying network stream is
            // released once the contents have been buffered into memory.
            using ( GetObjectResponse response = client.GetObject( request ) )
            {
                MemoryStream ms = new MemoryStream();
                response.ResponseStream.CopyTo( ms );
                ms.Position = 0;
                return ms;
            }
        }
        catch ( AmazonS3Exception ex )
        {
            if ( ex.ErrorCode == "NoSuchKey" )
            {
                return null; // File doesn't exist
            }
            throw;
        }
    }
}
/// <summary>
/// Gets an existing APObject by type and id.
/// </summary>
/// <param name="type">Object type (schema name).</param>
/// <param name="id">Object id.</param>
/// <param name="fields">The object fields to be retrieved.</param>
/// <param name="options">Request specific api options. These will override the global settings for the app for this request.</param>
/// <returns>The matching APObject instance.</returns>
public async static Task<APObject> GetAsync(string type, string id, IEnumerable<string> fields = null, ApiOptions options = null)
{
    // Build the get request, narrowing to the requested fields when given.
    var request = new GetObjectRequest
    {
        Id = id,
        Type = type,
    };
    if (fields != null)
    {
        request.Fields.AddRange(fields);
    }
    ApiOptions.Apply(request, options);

    var response = await request.ExecuteAsync();
    if (!response.Status.IsSuccessful)
        throw response.Status.ToFault();

    // A successful get must carry the object payload.
    Debug.Assert(response.Object != null, "For a successful get call, object should always be returned.");
    return response.Object;
}
/// <summary>
/// Sample: uploads a file, then reads back its first 101 bytes via a ranged
/// GetObject request and prints the content length of the returned range.
/// Errors are reported to the console rather than rethrown (sample style).
/// </summary>
/// <param name="bucketName">Bucket to upload to and read from.</param>
public static void GetObjectByRequest(string bucketName)
{
    try
    {
        client.PutObject(bucketName, key, fileToUpload);

        var request = new GetObjectRequest(bucketName, key);
        request.SetRange(0, 100); // inclusive range: bytes 0..100

        // FIX: OssObject is IDisposable (it holds the content stream) —
        // dispose it so the connection is returned promptly.
        using (var result = client.GetObject(request))
        {
            Console.WriteLine("Get object succeeded, length:{0}", result.Metadata.ContentLength);
        }
    }
    catch (OssException ex)
    {
        Console.WriteLine("Failed with error code: {0}; Error info: {1}. \nRequestID:{2}\tHostID:{3}",
            ex.ErrorCode, ex.Message, ex.RequestId, ex.HostId);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Failed with error info: {0}", ex.Message);
    }
}
/// <summary>
/// Uploads a 15 MB generated file as a three-part encrypted multipart upload,
/// verifies the part listing and completion metadata, then downloads the
/// object with the decryption client and checks it matches the source file.
/// </summary>
/// <param name="s3EncryptionClient">Client used for the (encrypting) upload calls.</param>
/// <param name="s3DecryptionClient">Client used for the (decrypting) download calls.</param>
/// <param name="bucketName">Bucket the test object is written to.</param>
public static async Task MultipartEncryptionTestAsync(AmazonS3Client s3EncryptionClient, AmazonS3Client s3DecryptionClient, string bucketName)
{
    var filePath = Path.GetTempFileName();
    var retrievedFilepath = Path.GetTempFileName();
    var totalSize = MegaBytesSize * 15;
    UtilityMethods.GenerateFile(filePath, totalSize);
    var key = Guid.NewGuid().ToString();
    // Single source stream shared by all three part uploads; each part reads
    // the next PartSize bytes from the current position.
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.OneZoneInfrequentAccess,
            ContentType = "text/html",
        };
        InitiateMultipartUploadResponse initResponse =
            await s3EncryptionClient.InitiateMultipartUploadAsync(initRequest).ConfigureAwait(false);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MegaBytesSize,
            InputStream = inputStream,
        };
        UploadPartResponse up1Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        // Upload part 2
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MegaBytesSize,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        // Upload part 3 — final part; no PartSize, it takes the remainder of the stream.
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };
        UploadPartResponse up3Response = await s3EncryptionClient.UploadPartAsync(uploadRequest).ConfigureAwait(false);

        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = await s3EncryptionClient.ListPartsAsync(listPartRequest).ConfigureAwait(false);

        // The listing must report all three parts with matching numbers/ETags.
        Assert.Equal(3, listPartResponse.Parts.Count);
        Assert.Equal(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.Equal(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.Equal(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.Equal(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.Equal(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.Equal(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // MaxParts = 1 must page the listing down to a single part.
        listPartRequest.MaxParts = 1;
        listPartResponse = await s3EncryptionClient.ListPartsAsync(listPartRequest).ConfigureAwait(false);
        Assert.Single(listPartResponse.Parts);

        // Complete the response
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);
        CompleteMultipartUploadResponse compResponse =
            await s3EncryptionClient.CompleteMultipartUploadAsync(compRequest).ConfigureAwait(false);
        Assert.Equal(bucketName, compResponse.BucketName);
        Assert.NotNull(compResponse.ETag);
        Assert.Equal(key, compResponse.Key);
        Assert.NotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectResponse getResponse = await s3DecryptionClient.GetObjectAsync(getRequest).ConfigureAwait(false);
        await getResponse.WriteResponseStreamToFileAsync(retrievedFilepath, false, CancellationToken.None);
        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // Content type set at initiation must survive the round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse = await s3DecryptionClient.GetObjectMetadataAsync(metaDataRequest).ConfigureAwait(false);
        Assert.Equal("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the source stream and remove both temp files,
        // regardless of which assertion or call failed.
        inputStream.Dispose();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// End-to-end S3 object-versioning demo: create bucket → enable versioning →
/// put object → delete object (delete marker) → verify 404 → restore the old
/// version via server-side copy → verify readable → purge all versions →
/// delete bucket. Any failed step prints "fail" and exits with code 1.
/// </summary>
public static void Main(string[] args)
{
    // NOTE(review): accepts any TLS certificate process-wide — acceptable for
    // a self-signed demo endpoint only, never for production.
    System.Net.ServicePointManager.ServerCertificateValidationCallback = ((sender, certificate, chain, sslPolicyErrors) => true);

    // create the AWS S3 client
    AmazonS3Client s3 = AWSS3Factory.getS3Client();
    // Timestamp suffix keeps repeated runs from colliding on the bucket name.
    String bucketName = String.Join("-", AWSS3Factory.S3_BUCKET, DateTime.Now.ToString("yyyyMMddHHmmss"));

    //********************//
    // 1. Create a bucket //
    //********************//
    Console.Write(string.Format(" [*] Creating bucket '{0}'... ", bucketName));
    PutBucketResponse pbRes = s3.PutBucket(bucketName);
    if (pbRes.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");

    //*******************************************//
    // 2. Enable object versioning on the bucket //
    //*******************************************//
    Console.Write(string.Format(" [*] Enabling bucket versioning for bucket '{0}'... ", bucketName));
    PutBucketVersioningRequest pvr = new PutBucketVersioningRequest()
    {
        BucketName = bucketName,
        VersioningConfig = new S3BucketVersioningConfig()
        {
            Status = VersionStatus.Enabled
        }
    };
    PutBucketVersioningResponse pvrResponse = s3.PutBucketVersioning(pvr);
    if (pvrResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");

    //************************************//
    // 3. Create a new object (version 1) //
    //************************************//
    String objectKey = "object-" + DateTime.Now.ToString("yyyyMMddHHmmssffff");
    Console.Write(string.Format(" [*] Creating a new object with key '{0}'... ", objectKey));
    PutObjectRequest poRequest = new PutObjectRequest()
    {
        BucketName = bucketName,
        ContentBody = "Lorem ipsum dolor sit amet, consectetur adipiscing elit...",
        Key = objectKey
    };
    PutObjectResponse poResponse = s3.PutObject(poRequest);
    if (poResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");
    Console.WriteLine(string.Format(" [x] Object content: '{0}'", poRequest.ContentBody));

    //****************************************//
    // 4. Delete the object (deletion marker) //
    //****************************************//
    Console.Write(string.Format(" [*] Deleting object with key '{0}' (adding a deletion marker)... ", objectKey));
    DeleteObjectRequest doRequest = new DeleteObjectRequest()
    {
        BucketName = bucketName,
        Key = objectKey
    };
    DeleteObjectResponse doResponse = s3.DeleteObject(doRequest);
    // On a versioned bucket a plain delete only adds a marker, so the
    // response must report DeleteMarker == "true".
    if (doResponse.HttpStatusCode != System.Net.HttpStatusCode.NoContent || doResponse.DeleteMarker != "true")
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");

    //*************************************************//
    // 5. Try to get the object (expect 404 Not Found) //
    //*************************************************//
    Console.Write(string.Format(" [*] Trying to read object with key '{0}' (expecting 404 Not Found)... ", objectKey));
    GetObjectRequest goRequest = new GetObjectRequest()
    {
        BucketName = bucketName,
        Key = objectKey,
    };
    try
    {
        // should throw an exception as the object is marked as deleted
        s3.GetObject(goRequest);
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    catch (AmazonS3Exception e)
    {
        if (e.StatusCode != System.Net.HttpStatusCode.NotFound)
        {
            Console.WriteLine("fail");
            Console.ReadLine();
            System.Environment.Exit(1);
        }
    }
    Console.WriteLine("done (404 Not Found)");

    //*************************************************************************//
    // 6. List the object versions and get the version ID of the first version //
    //*************************************************************************//
    Console.WriteLine(string.Format(" [*] Listing object versions for bucket '{0}' and getting version ID to restore... ", bucketName));
    ListVersionsResponse lvResponse = s3.ListVersions(bucketName);
    if (lvResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    String restoreVersion = String.Empty;
    foreach (S3ObjectVersion version in lvResponse.Versions)
    {
        if (version.Key != objectKey)
        {
            // filtering out other objects
            continue;
        }
        Console.WriteLine(string.Format(" [x] -> Object key: {0}", version.Key));
        Console.WriteLine(string.Format(" [x] VersionId: {0}", version.VersionId));
        Console.WriteLine(string.Format(" [x] IsDeleteMarker: {0}", version.IsDeleteMarker));
        Console.WriteLine(string.Format(" [x] LastModified: {0}", version.LastModified));
        // Keep the last non-marker version seen as the restore candidate.
        if (!version.IsDeleteMarker)
        {
            restoreVersion = version.VersionId;
        }
    }
    if (restoreVersion.Length == 0)
    {
        Console.WriteLine(" [*] Could not find a version to restore, exiting...");
        Console.ReadLine();
        System.Environment.Exit(1);
    }

    //******************************************************************//
    // 7. Restore the first version using a server-side copy operation. //
    //******************************************************************//
    Console.Write(string.Format(" [*] Restoring object version ID '{0}' (server-side copy)... ", restoreVersion));
    CopyObjectRequest coRequest = new CopyObjectRequest()
    {
        SourceBucket = bucketName,
        SourceKey = objectKey,
        SourceVersionId = restoreVersion,
        DestinationBucket = bucketName,
        DestinationKey = objectKey
    };
    CopyObjectResponse coResponse = s3.CopyObject(coRequest);
    if (coResponse.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");

    //************************************************************//
    // 8. Verify that the object can now be successfully obtained //
    //************************************************************//
    Console.Write(string.Format(" [*] Trying to read object '{0}'... ", objectKey));
    GetObjectResponse goResponse = s3.GetObject(goRequest);
    if (goResponse.HttpStatusCode != System.Net.HttpStatusCode.OK || goResponse.ContentLength != poRequest.ContentBody.Length)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");
    String responseBody = "";
    using (Stream responseStream = goResponse.ResponseStream)
    using (StreamReader reader = new StreamReader(responseStream))
    {
        responseBody = reader.ReadToEnd();
    }
    Console.WriteLine(string.Format(" [x] Object '{0}' successfully restored. New VersionId: '{1}'. Content: '{2}'", goResponse.Key, goResponse.VersionId, responseBody));

    //*******************************************//
    // 9. Permanently delete the object versions //
    //*******************************************//
    Console.Write(" [*] Permanently deleting all object versions... ");
    ListVersionsResponse lv2Response = s3.ListVersions(bucketName);
    if (lv2Response.HttpStatusCode != System.Net.HttpStatusCode.OK)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    foreach (S3ObjectVersion version in lv2Response.Versions)
    {
        // Deleting with an explicit VersionId removes that version for good
        // (including delete markers) instead of adding another marker.
        DeleteObjectRequest do2Request = new DeleteObjectRequest()
        {
            BucketName = bucketName,
            Key = version.Key,
            VersionId = version.VersionId
        };
        DeleteObjectResponse do2Response = s3.DeleteObject(do2Request);
        if (do2Response.HttpStatusCode != System.Net.HttpStatusCode.NoContent)
        {
            Console.WriteLine("fail");
            Console.ReadLine();
            System.Environment.Exit(1);
        }
    }
    Console.WriteLine("done");

    //***********************//
    // 10. Delete the bucket //
    //***********************//
    Console.Write(String.Format(" [*] Deleting bucket '{0}' (sleeping 5 seconds)... ", bucketName));
    System.Threading.Thread.Sleep(5000);
    DeleteBucketResponse dbRes = s3.DeleteBucket(bucketName);
    if (dbRes.HttpStatusCode != System.Net.HttpStatusCode.NoContent)
    {
        Console.WriteLine("fail");
        Console.ReadLine();
        System.Environment.Exit(1);
    }
    Console.WriteLine("done");
    Console.WriteLine(" [*] Example is completed. Press any key to exit...");
    Console.ReadLine();
}
/// <summary>
/// Creates a deserializer bound to the originating request; the request is
/// retained so response parsing can refer back to it.
/// </summary>
/// <param name="getObjectRequest">The request whose response this instance will deserialize.</param>
public GetObjectResponseDeserializer(GetObjectRequest getObjectRequest) : base(null)
{
    _getObjectRequest = getObjectRequest;
}
/// <summary>
/// Downloads the requested object to the local file path. On a transient
/// failure it retries, resuming from the last byte already on disk (ranged
/// GET + append); if the object's ETag changes between attempts the whole
/// download restarts from scratch.
/// </summary>
/// <param name="cancellationToken">Token used to cancel the S3 calls and file writes.</param>
public override async Task ExecuteAsync(CancellationToken cancellationToken)
{
    ValidateRequest();
    GetObjectRequest getRequest = ConvertToGetObjectRequest(this._request);

    var maxRetries = ((AmazonS3Client)_s3Client).Config.MaxErrorRetry;
    var retries = 0;
    bool shouldRetry = false;
    string mostRecentETag = null;
    do
    {
        shouldRetry = false;

        if (retries != 0)
        {
            // Resume attempt: request only the bytes not yet written locally.
#if PCL
            ByteRange bytesRemaining = await ByteRangeRemainingForDownloadAsync(this._request.FilePath);
#else
            ByteRange bytesRemaining = ByteRangeRemainingForDownload(this._request.FilePath);
#endif
            getRequest.ByteRange = bytesRemaining;
        }

        try
        {
            using (var response = await this._s3Client.GetObjectAsync(getRequest, cancellationToken)
                .ConfigureAwait(continueOnCapturedContext: false))
            {
                //if the eTag changed, we need to retry from the start of the file
                if (!string.IsNullOrEmpty(mostRecentETag) && !string.Equals(mostRecentETag, response.ETag))
                {
                    mostRecentETag = response.ETag;
                    getRequest.ByteRange = null;
                    retries = 0;
                    shouldRetry = true;
                    WaitBeforeRetry(retries);
                    continue;
                }
                mostRecentETag = response.ETag;

                if (retries == 0)
                {
                    /*
                     * Wipe the local file, if it exists, to handle the edge case where:
                     *
                     * 1. File foo exists
                     * 2. We start trying to download, but unsuccessfully write any data
                     * 3. We retry the download, with retries > 0, thus hitting the else statement below
                     * 4. We will append to file foo, instead of overwriting it
                     *
                     * We counter it with the call below because it's the same call that would be hit
                     * in WriteResponseStreamToFile. If any exceptions are thrown, they will be the same as before
                     * to avoid any breaking changes to customers who handle that specific exception in a
                     * particular manner.
                     */
#if PCL
                    var file = await PCLStorage.FileSystem.Current.GetFileFromPathAsync(this._request.FilePath).ConfigureAwait(false);
                    if (file != null)
                    {
                        await file.DeleteAsync().ConfigureAwait(false);
                    }
#endif
#if BCL
                    using (FileStream temp = new FileStream(this._request.FilePath, FileMode.Create, FileAccess.ReadWrite, FileShare.Read, Amazon.S3.Util.S3Constants.DefaultBufferSize))
                    {
                        //Do nothing. Simply using the "using" statement to create and dispose of FileStream temp in the same call.
                    };
#endif
                    // First attempt: overwrite (append == false).
                    response.WriteObjectProgressEvent += OnWriteObjectProgressEvent;
                    await response.WriteResponseStreamToFileAsync(this._request.FilePath, false, cancellationToken)
                        .ConfigureAwait(continueOnCapturedContext: false);
                }
                else
                {
                    // Resume attempt: append the remaining byte range to the existing file.
                    response.WriteObjectProgressEvent += OnWriteObjectProgressEvent;
                    await response.WriteResponseStreamToFileAsync(this._request.FilePath, true, cancellationToken)
                        .ConfigureAwait(continueOnCapturedContext: false);
                }
            }
        }
        catch (Exception exception)
        {
            retries++;
            shouldRetry = HandleExceptionForHttpClient(exception, retries, maxRetries);
            if (!shouldRetry)
            {
                // Retries exhausted (or non-retryable): surface the exception
                // with the same type/stack semantics callers already expect.
                if (exception is IOException)
                {
                    throw;
                }
                else if (exception.InnerException is IOException)
                {
                    ExceptionDispatchInfo.Capture(exception.InnerException).Throw();
                }
                else if (exception is AmazonServiceException || exception is AmazonClientException)
                {
                    throw;
                }
                else
                {
                    throw new AmazonServiceException(exception);
                }
            }
        }
        WaitBeforeRetry(retries);
    } while (shouldRetry);
}
/// <summary>
/// Post-processing for the encryption client. Depending on the completed
/// operation it: stores the encryption context after InitiateMultipartUpload,
/// chains IVs between uploaded parts, decrypts a downloaded object (metadata
/// or instruction-file mode), or, after CompleteMultipartUpload, writes the
/// instruction file and releases that upload's encryption context.
/// </summary>
/// <param name="executionContext">Request/response pair for the completed S3 call.</param>
/// <param name="decryptedEnvelopeKeyKMS">Envelope key already decrypted via KMS; passed through to metadata-based decryption.</param>
protected void PostInvokeSynchronous(IExecutionContext executionContext, byte[] decryptedEnvelopeKeyKMS)
{
    var request = executionContext.RequestContext.Request;
    var response = executionContext.ResponseContext.Response;

    // Case 1: multipart upload initiated — capture key/IV so each subsequent
    // UploadPart continues the same cipher stream.
    var initiateMultiPartUploadRequest = request.OriginalRequest as InitiateMultipartUploadRequest;
    var initiateMultiPartResponse = response as InitiateMultipartUploadResponse;
    if (initiateMultiPartResponse != null)
    {
        byte[] encryptedEnvelopeKey = initiateMultiPartUploadRequest.EncryptedEnvelopeKey;
        byte[] envelopeKey = initiateMultiPartUploadRequest.EnvelopeKey;
        byte[] iv = initiateMultiPartUploadRequest.IV;

        UploadPartEncryptionContext contextForEncryption = new UploadPartEncryptionContext();
        contextForEncryption.StorageMode = initiateMultiPartUploadRequest.StorageMode;
        contextForEncryption.EncryptedEnvelopeKey = encryptedEnvelopeKey;
        contextForEncryption.EnvelopeKey = envelopeKey;
        contextForEncryption.NextIV = iv;
        contextForEncryption.FirstIV = iv;
        contextForEncryption.PartNumber = 0;

        //Add context for encryption of next part
        this.EncryptionClient.CurrentMultiPartUploadKeys.Add(initiateMultiPartResponse.UploadId, contextForEncryption);
    }

    // Case 2: a part finished uploading — record where the cipher stream
    // stopped so the next part's encryption picks up from that IV.
    var uploadPartRequest = request.OriginalRequest as UploadPartRequest;
    var uploadPartResponse = response as UploadPartResponse;
    if (uploadPartResponse != null)
    {
        string uploadID = uploadPartRequest.UploadId;
        UploadPartEncryptionContext encryptedUploadedContext = null;
        if (!this.EncryptionClient.CurrentMultiPartUploadKeys.TryGetValue(uploadID, out encryptedUploadedContext))
        {
            throw new AmazonS3Exception("encryption context for multi part upload not found");
        }

        if (uploadPartRequest.IsLastPart == false)
        {
            object stream = null;
            if (!((Amazon.Runtime.Internal.IAmazonWebServiceRequest)uploadPartRequest).RequestState.TryGetValue(AmazonS3EncryptionClient.S3CryptoStream, out stream))
            {
                throw new AmazonS3Exception("cannot retrieve S3 crypto stream from request state, hence cannot get Initialization vector for next uploadPart ");
            }
            var encryptionStream = stream as AESEncryptionUploadPartStream;
            encryptedUploadedContext.NextIV = encryptionStream.InitializationVector;
        }
    }

    // Case 3: an object was downloaded — decrypt using either the metadata
    // envelope or a side-car instruction file.
    var getObjectResponse = response as GetObjectResponse;
    if (getObjectResponse != null)
    {
        if (EncryptionUtils.IsEncryptionInfoInMetadata(getObjectResponse) == true)
        {
            DecryptObjectUsingMetadata(getObjectResponse, decryptedEnvelopeKeyKMS);
        }
        else
        {
            GetObjectResponse instructionFileResponse = null;
            try
            {
                GetObjectRequest instructionFileRequest = EncryptionUtils.GetInstructionFileRequest(getObjectResponse);
                instructionFileResponse = this.EncryptionClient.S3ClientForInstructionFile.GetObject(instructionFileRequest);
            }
            catch (AmazonServiceException ace)
            {
                throw new AmazonServiceException(string.Format(CultureInfo.InvariantCulture,
                    "Unable to decrypt data for object {0} in bucket {1}",
                    getObjectResponse.Key, getObjectResponse.BucketName), ace);
            }

            if (EncryptionUtils.IsEncryptionInfoInInstructionFile(instructionFileResponse) == true)
            {
                DecryptObjectUsingInstructionFile(getObjectResponse, instructionFileResponse);
            }
        }
    }

    // Case 4: multipart upload completed — optionally persist the instruction
    // file, then release this upload's encryption context.
    var completeMultiPartUploadRequest = request.OriginalRequest as CompleteMultipartUploadRequest;
    var completeMultipartUploadResponse = response as CompleteMultipartUploadResponse;
    if (completeMultipartUploadResponse != null)
    {
        UploadPartEncryptionContext context = this.EncryptionClient.CurrentMultiPartUploadKeys[completeMultiPartUploadRequest.UploadId];
        if (context.StorageMode == CryptoStorageMode.InstructionFile)
        {
            byte[] envelopeKey = context.EnvelopeKey;
            byte[] iv = context.FirstIV;
            byte[] encryptedEnvelopeKey = context.EncryptedEnvelopeKey;
            EncryptionInstructions instructions = new EncryptionInstructions(EncryptionClient.EncryptionMaterials.MaterialsDescription, envelopeKey, encryptedEnvelopeKey, iv);
            PutObjectRequest instructionFileRequest = EncryptionUtils.CreateInstructionFileRequest(completeMultiPartUploadRequest, instructions);
            this.EncryptionClient.S3ClientForInstructionFile.PutObject(instructionFileRequest);
        }

        // BUGFIX: remove only the completed upload's context. The previous
        // Clear() wiped the contexts of every other multipart upload still in
        // flight on this client, corrupting their encryption state.
        this.EncryptionClient.CurrentMultiPartUploadKeys.Remove(completeMultiPartUploadRequest.UploadId);
    }
}
/// <summary>
/// Download from amazon.
/// </summary>
/// <param name="destinationPath">destination path string</param>
/// <param name="fileName">file name string</param>
/// <param name="contentType">file type i.e. Audio Video</param>
/// <param name="bucketName">bucketname</param>
/// <returns>true or false</returns>
public static bool DownloadFromAmazon(string destinationPath, string fileName, int contentType, string bucketName = "mybucketname")
{
    try
    {
        AmazonS3Config config = new AmazonS3Config();
        config.ServiceURL = serviceUrl;

        // Map the numeric content type onto the folder name used on S3;
        // anything unrecognized (including 1) goes under "File".
        string folder;
        switch (contentType)
        {
            case 2:
                folder = "Audio";
                break;
            case 3:
                folder = "Video";
                break;
            case 4:
                folder = "Link";
                break;
            default:
                folder = "File";
                break;
        }

        client = Amazon.AWSClientFactory.CreateAmazonS3Client(
            accessKey,
            secretKey,
            config
            );

        // Key name consists of the path of the folder where we want to save the attachments on amazon storage.
        string keyName = "/messagefiles/" + folder + "/" + fileName;

        using (client)
        {
            GetObjectRequest request = new GetObjectRequest
            {
                BucketName = bucketName,
                Key = keyName
            };
            using (GetObjectResponse response = client.GetObject(request))
            {
                response.WriteResponseStreamToFile(destinationPath);
            }
        }
        return true;
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        // Catch amazon exception.
        //LogHelper.Error("DownloadFromAmazon", amazonS3Exception);
        return false;
    }
    catch (Exception ex)
    {
        // Catch general exception.
        //LogHelper.Error("DownloadFromAmazon", ex);
        return false;
    }
}
/// <summary>
/// Chunk-level file sync to S3: compares the local file's chunk list against
/// the chunk metadata object stored in the bucket, uploads only the chunks
/// that differ, refreshes the remote metadata (only when new chunks were
/// uploaded), and returns the SHA-1 hash of the serialized chunk-list JSON.
/// Returns null if any step throws.
/// </summary>
/// <param name="filePath">Path of the local file to synchronize.</param>
public byte[] UploadFileAsChunks(string filePath)
{
    string s3objectName;
    List<ChunkInfo> chunkList_cloud = new List<ChunkInfo>();; // list of chunk indexed by chunk-index (e.g. 0, 1, 2,....)
    List<ChunkInfo> chunkList_local; // list of chunk indexed by chunk-index (e.g. 0, 1, 2,....)
    try
    {
        if (logger != null) { logger.Log("Start Synchronizer Check Blob Exists"); }
        s3objectName = Path.GetFileName(filePath);
        bool s3ObjectExists = S3ObjectExists(ChunkMetadataObjectPrefix + s3objectName);
        if (logger != null) { logger.Log("End Synchronizer Check Blob Exists"); }

        if (s3ObjectExists)
        {
            // Metadata object exists: read the remote chunk list (JSON) so we
            // can diff against the local file's chunks.
            if (logger != null) { logger.Log("Start Synchronizer Fill Remote ChunkList"); }
            GetObjectRequest request = new GetObjectRequest();
            request.WithBucketName(bucketName);
            request.WithKey(ChunkMetadataObjectPrefix + s3objectName);
            // NOTE(review): response and reader are never disposed — the
            // response stream stays open until GC; consider using blocks.
            GetObjectResponse response = amazonS3Client.GetObject(request);
            StreamReader reader = new StreamReader(response.ResponseStream);
            string chunkMD_JSON = reader.ReadToEnd();
            FileMD fileMD = JsonConvert.DeserializeObject<FileMD>(chunkMD_JSON);
            // Adopt the remote chunking parameters so local chunking matches.
            StaticChunkSize = fileMD.StaticChunkSize;
            chunkList_cloud = fileMD.ChunkList;
            if (logger != null) { logger.Log("End Synchronizer Fill Remote ChunkList"); }
            chunkCompressionType = SyncFactory.GetCompressionType(fileMD.compressionType);
            chunkEncryptionType = SyncFactory.GetEncryptionType(fileMD.encryptionType);
        }

        if (logger != null) { logger.Log("Start Synchronizer Fill Local ChunkList"); }
        StaticChunk staticChunker = new StaticChunk(StaticChunkSize);
        chunkList_local = staticChunker.GetCurrentChunkList(filePath); // if doing other class that implements the IChunk interface
        // structuredLog("I", "Number of chunks locally: " + chunkList_local.Count);
        if (logger != null) { logger.Log("End Synchronizer Fill Local ChunkList"); }

        if (logger != null) { logger.Log("Start Synchronizer ChunkList Compare"); }
        // Diff: chunks present locally but missing/stale on the cloud.
        List<ChunkInfo> chunkList_toUpload = staticChunker.GetUploadChunkList(chunkList_local, chunkList_cloud);
        // structuredLog("I", "Number of chunks on cloud blob: " + chunkList_cloud.Count);
        // structuredLog("I", "Number of chunks to be uploaded: " + chunkList_toUpload.Count);
        if (logger != null) { logger.Log("End Synchronizer ChunkList Compare"); }

        if (logger != null) { logger.Log("Start Synchronizer Upload Multiple Chunks"); }
        UploadChunkList(ref chunkList_toUpload, filePath, s3objectName);
        if (logger != null) { logger.Log("End Synchronizer Upload Multiple Chunks"); }
        // structuredLog("I", "Number of chunks uploaded: " + chunkList_toUpload.Count);

        if (logger != null) { logger.Log("Start Synchronizer ChunkList Upload"); }
        string json = JsonConvert.SerializeObject(new FileMD(StaticChunkSize, chunkList_local, SyncFactory.GetCompressionTypeAsString(this.chunkCompressionType), SyncFactory.GetEncryptionTypeAsString(this.chunkEncryptionType)), new KeyValuePairConverter());
        if (chunkList_toUpload.Count > 0) //upload new chunk list only if we uploaded some new chunks
        {
            UploadStringToS3Object(ChunkMetadataObjectPrefix + s3objectName, json);
        }
        // Return a digest of the metadata JSON so callers can detect change.
        SHA1 sha1 = new SHA1CryptoServiceProvider();
        byte[] ret = sha1.ComputeHash(Encoding.ASCII.GetBytes(json));
        if (logger != null) { logger.Log("End Synchronizer ChunkList Upload"); }
        return(ret);
    }
    catch (Exception e)
    {
        // Best-effort contract: any failure is logged and reported as null.
        structuredLog("E", " . UploadFileAsChunks: " + e);
        return(null);
    }
}
/// <summary>
/// Loads the latest master data. Resolves the data location from the KVS
/// (MasterDataPath), skips the load when the cached version/update-date is
/// current, fetches the raw JSON either from S3 (s3KeyPlain set) or via HTTP
/// (pathPlain), parses it, and publishes it to LatestMasterData.
/// </summary>
public static async Task LoadAsync()
{
    var masterDataPath = new MasterDataPath();
    if (false == await masterDataPath.FetchAsync())
    {
        Logger.Logging(new LogObj().AddChild(new LogModels.ErrorReport
        {
            Msg = "Not found MasterDataPath@Kvs!",
        }));
        return;
    }

    // Skip when the already-loaded data matches both version and update date.
    if ((LatestMasterData != null && LatestMasterData.VersionStr == masterDataPath.Model.version)
        && masterDataPath.Model.updateDate == LatestMasterDataUpdateDate)
    {
        // Already Loaded
        return;
    }

    var path = "";
    var rawJson = "";
    if (!string.IsNullOrEmpty(masterDataPath.Model.s3KeyPlain))
    {
        // S3 source.
        try
        {
            path = $"{DeliveryDataInfo.S3BucketName}/{masterDataPath.Model.s3KeyPlain}";
            Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataStartLoading
            {
                Path = path,
                UpdateDate = masterDataPath.Model.updateDate,
                Date = DateTime.UtcNow,
            }));
            // FIX: dispose the client along with the response/reader.
            using (var s3Client = new AmazonS3Client(
                Amazon.RegionEndpoint.GetBySystemName(DeliveryDataInfo.S3BucketRegion)))
            {
                var getReq = new GetObjectRequest
                {
                    BucketName = DeliveryDataInfo.S3BucketName,
                    Key = masterDataPath.Model.s3KeyPlain,
                };
                using (var getRes = await s3Client.GetObjectAsync(getReq))
                using (var reader = new System.IO.StreamReader(getRes.ResponseStream))
                {
                    rawJson = await reader.ReadToEndAsync();
                }
            }
        }
        catch (Exception ex)
        {
            // BUGFIX: the failure was silently swallowed (ex unused); log it
            // like every other error path in this method before giving up.
            Logger.Logging(new LogObj().AddChild(new LogModels.ErrorReport
            {
                Msg = $"Failed to load MasterData from \"{path}\": {ex}",
            }));
            return;
        }
    }
    else
    {
        // HTTP source.
        path = $"{masterDataPath.Model.pathPlain}";
        Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataStartLoading
        {
            Path = path,
            UpdateDate = masterDataPath.Model.updateDate,
            Date = DateTime.UtcNow,
        }));
        var requester = new JsonSrcRequester();
        var response = await requester.GetAsync(masterDataPath.Model.pathPlain);
        if (response.StatusCode != System.Net.HttpStatusCode.OK)
        {
            Logger.Logging(new LogObj().AddChild(new LogModels.ErrorReport
            {
                Msg = $"Not found JsonFile at \"{masterDataPath.Model.pathPlain}\".",
            }));
            return;
        }
        rawJson = response.Payload;
    }

    // Parse and publish; record the update date so the next call can skip.
    var masterData = await ParseAsync(
        rawJson,
        masterDataPath.Model.pathEncrypt,
        masterDataPath.Model.pathPlain);
    LatestMasterData = masterData;
    LatestMasterDataUpdateDate = masterDataPath.Model.updateDate;
    Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataEndLoading
    {
        Path = path,
        UpdateDate = masterDataPath.Model.updateDate,
        Date = DateTime.UtcNow,
    }));
}
/// <summary>
/// Sample walkthrough of common S3 object operations: listing, get (stream and
/// to-file), metadata, put (body/file/stream), delete (single and batch), copy,
/// versioning + ListVersions, and a manual multipart upload.
/// Fixes vs. original: versioning status corrected from "Enable" to the valid
/// API value "Enabled", and a verbatim-duplicated CopyObject sample removed.
/// </summary>
public void ObjectSamples()
{
    {
        #region ListObjects Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // List all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket",
        };

        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                Console.WriteLine("Object - " + obj.Key);
                Console.WriteLine(" Size - " + obj.Size);
                Console.WriteLine(" LastModified - " + obj.LastModified);
                Console.WriteLine(" Storage class - " + obj.StorageClass);
            }

            // Set the marker property
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);

        #endregion
    }

    {
        #region GetObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            using (StreamReader reader = new StreamReader(response.ResponseStream))
            {
                string contents = reader.ReadToEnd();
                Console.WriteLine("Object - " + response.Key);
                Console.WriteLine(" Version Id - " + response.VersionId);
                Console.WriteLine(" Contents - " + contents);
            }
        }

        #endregion
    }

    {
        #region GetObject WriteResponseStreamToFile Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            // Save object to local file
            response.WriteResponseStreamToFile("Item1.txt");
        }

        #endregion
    }

    {
        #region GetObjectMetadata Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObjectMetadata request
        GetObjectMetadataRequest request = new GetObjectMetadataRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and view the response
        GetObjectMetadataResponse response = client.GetObjectMetadata(request);
        Console.WriteLine("Content Length - " + response.ContentLength);
        Console.WriteLine("Content Type - " + response.Headers.ContentType);
        if (response.Expiration != null)
        {
            Console.WriteLine("Expiration Date - " + response.Expiration.ExpiryDate);
            Console.WriteLine("Expiration Rule Id - " + response.Expiration.RuleId);
        }

        #endregion
    }

    {
        #region PutObject Sample 1

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            ContentBody = "This is sample content..."
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);

        #endregion
    }

    {
        #region PutObject Sample 2

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            FilePath = "contents.txt"
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);

        #endregion
    }

    {
        #region PutObject Sample 3

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        };

        using (FileStream stream = new FileStream("contents.txt", FileMode.Open))
        {
            request.InputStream = stream;

            // Put object
            PutObjectResponse response = client.PutObject(request);
        }

        #endregion
    }

    {
        #region DeleteObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObject request
        DeleteObjectRequest request = new DeleteObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request
        client.DeleteObject(request);

        #endregion
    }

    {
        #region DeleteObjects Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObject request
        DeleteObjectsRequest request = new DeleteObjectsRequest
        {
            BucketName = "SampleBucket",
            Objects = new List<KeyVersion>
            {
                new KeyVersion() { Key = "Item1" },
                // Versioned item
                new KeyVersion() { Key = "Item2", VersionId = "Rej8CiBxcZKVK81cLr39j27Y5FVXghDK", },
                // Item in subdirectory
                new KeyVersion() { Key = "Logs/error.txt" }
            }
        };

        try
        {
            // Issue request
            DeleteObjectsResponse response = client.DeleteObjects(request);
        }
        catch (DeleteObjectsException doe)
        {
            // Catch error and list error details
            DeleteObjectsResponse errorResponse = doe.Response;

            foreach (DeletedObject deletedObject in errorResponse.DeletedObjects)
            {
                Console.WriteLine("Deleted item " + deletedObject.Key);
            }
            foreach (DeleteError deleteError in errorResponse.DeleteErrors)
            {
                Console.WriteLine("Error deleting item " + deleteError.Key);
                Console.WriteLine(" Code - " + deleteError.Code);
                Console.WriteLine(" Message - " + deleteError.Message);
            }
        }

        #endregion
    }

    {
        #region CopyObject Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };

        // Issue request
        client.CopyObject(request);

        #endregion
    }

    {
        #region ListVersions Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Turn versioning on for a bucket.
        // NOTE: the S3 API only accepts "Enabled" / "Suspended" here; the
        // original sample's "Enable" is rejected by the service.
        client.PutBucketVersioning(new PutBucketVersioningRequest
        {
            BucketName = "SampleBucket",
            VersioningConfig = new S3BucketVersioningConfig { Status = "Enabled" }
        });

        // Populate bucket with multiple items, each with multiple versions
        PopulateBucket(client, "SampleBucket");

        // Get versions
        ListVersionsRequest request = new ListVersionsRequest
        {
            BucketName = "SampleBucket"
        };

        // Make paged ListVersions calls
        ListVersionsResponse response;
        do
        {
            response = client.ListVersions(request);

            // View information about versions
            foreach (var version in response.Versions)
            {
                Console.WriteLine("Key = {0}, Version = {1}, IsLatest = {2}, LastModified = {3}, Size = {4}",
                    version.Key,
                    version.VersionId,
                    version.IsLatest,
                    version.LastModified,
                    version.Size);
            }

            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        } while (response.IsTruncated);

        #endregion
    }

    {
        #region Multipart Upload Sample

        int MB = (int)Math.Pow(2, 20);

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Define input stream
        Stream inputStream = Create13MBDataStream();

        // Initiate multipart upload
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        InitiateMultipartUploadResponse initResponse = client.InitiateMultipartUpload(initRequest);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = client.UploadPart(uploadRequest);

        // Upload part 2
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = client.UploadPart(uploadRequest);

        // Upload part 3 (no PartSize: consumes the remainder of the stream)
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream
        };
        UploadPartResponse up3Response = client.UploadPart(uploadRequest);

        // List parts for current upload
        ListPartsRequest listPartRequest = new ListPartsRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = client.ListParts(listPartRequest);
        Debug.Assert(listPartResponse.Parts.Count == 3);

        // Complete the multipart upload
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartETags = new List<PartETag>
            {
                new PartETag { ETag = up1Response.ETag, PartNumber = 1 },
                new PartETag { ETag = up2Response.ETag, PartNumber = 2 },
                new PartETag { ETag = up3Response.ETag, PartNumber = 3 }
            }
        };
        CompleteMultipartUploadResponse compResponse = client.CompleteMultipartUpload(compRequest);

        #endregion
    }
}
/// <summary>
/// Integration test: uploads a ~15 MB generated file in three encrypted multipart
/// parts (instruction-file mode), verifies ListParts / paging, completes the upload,
/// downloads the object back and compares it byte-for-byte with the original, then
/// checks stored content-type metadata. Temp files are cleaned up in finally.
/// </summary>
public void MultipartEncryptionTestInstructionFile()
{
    string filePath = @"C:\temp\Upload15MegFileIn3PartsViaStream.txt";
    string retrievedFilepath = @"C:\temp\Upload15MegFileIn3PartsViaStreamRetreived.txt";
    // "MEG" here is deliberately 2^20 + 4001 bytes, so part sizes are not round numbers.
    int MEG_SIZE = (int)Math.Pow(2, 20) + 4001;
    long totalSize = (long)(15 * MEG_SIZE);
    UtilityMethods.GenerateFile(filePath, totalSize);
    // NOTE: "Instrcution" typo in the key is in the original; it is only an object key.
    string key = "MultipartEncryptionTestInstrcutionFile" + random.Next();

    s3EncryptionClientFileMode.PutBucket(new PutBucketRequest() { BucketName = bucketName });

    // One shared stream: each UploadPart call advances the position, so the three
    // parts consume consecutive slices of the file. Order of calls matters.
    Stream inputStream = File.OpenRead(filePath);
    try
    {
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            StorageClass = S3StorageClass.ReducedRedundancy,
            ContentType = "text/html",
            CannedACL = S3CannedACL.PublicRead
        };

        InitiateMultipartUploadResponse initResponse =
            s3EncryptionClientFileMode.InitiateMultipartUpload(initRequest);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MEG_SIZE,
            InputStream = inputStream
        };

        UploadPartResponse up1Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 2
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MEG_SIZE + 4001,
            InputStream = inputStream
        };

        UploadPartResponse up2Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Upload part 3 (final part: no PartSize, IsLastPart flags the remainder)
        uploadRequest = new UploadPartRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream,
            IsLastPart = true
        };

        //uploadRequest.setLastPart();
        UploadPartResponse up3Response = s3EncryptionClientFileMode.UploadPart(uploadRequest);

        // Verify all three parts are visible, in order, with matching ETags.
        ListPartsRequest listPartRequest = new ListPartsRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };

        ListPartsResponse listPartResponse = s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(3, listPartResponse.Parts.Count);
        Assert.AreEqual(up1Response.PartNumber, listPartResponse.Parts[0].PartNumber);
        Assert.AreEqual(up1Response.ETag, listPartResponse.Parts[0].ETag);
        Assert.AreEqual(up2Response.PartNumber, listPartResponse.Parts[1].PartNumber);
        Assert.AreEqual(up2Response.ETag, listPartResponse.Parts[1].ETag);
        Assert.AreEqual(up3Response.PartNumber, listPartResponse.Parts[2].PartNumber);
        Assert.AreEqual(up3Response.ETag, listPartResponse.Parts[2].ETag);

        // Verify MaxParts paging limits the returned page size.
        listPartRequest.MaxParts = 1;
        listPartResponse = s3EncryptionClientFileMode.ListParts(listPartRequest);
        Assert.AreEqual(1, listPartResponse.Parts.Count);

        // Complete the response
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest()
        {
            BucketName = bucketName,
            Key = key,
            UploadId = initResponse.UploadId
        };
        compRequest.AddPartETags(up1Response, up2Response, up3Response);

        CompleteMultipartUploadResponse compResponse =
            s3EncryptionClientFileMode.CompleteMultipartUpload(compRequest);
        Assert.AreEqual(bucketName, compResponse.BucketName);
        Assert.IsNotNull(compResponse.ETag);
        Assert.AreEqual(key, compResponse.Key);
        Assert.IsNotNull(compResponse.Location);

        // Get the file back from S3 and make sure it is still the same.
        GetObjectRequest getRequest = new GetObjectRequest()
        {
            BucketName = bucketName,
            Key = key
        };

        GetObjectResponse getResponse = s3EncryptionClientFileMode.GetObject(getRequest);
        getResponse.WriteResponseStreamToFile(retrievedFilepath);

        UtilityMethods.CompareFiles(filePath, retrievedFilepath);

        // Content type set at initiation must survive the multipart round trip.
        GetObjectMetadataRequest metaDataRequest = new GetObjectMetadataRequest()
        {
            BucketName = bucketName,
            Key = key
        };
        GetObjectMetadataResponse metaDataResponse =
            s3EncryptionClientFileMode.GetObjectMetadata(metaDataRequest);
        Assert.AreEqual("text/html", metaDataResponse.Headers.ContentType);
    }
    finally
    {
        // Always release the source stream and remove both temp files.
        inputStream.Close();
        if (File.Exists(filePath))
        {
            File.Delete(filePath);
        }
        if (File.Exists(retrievedFilepath))
        {
            File.Delete(retrievedFilepath);
        }
    }
}
/// <summary>
/// End-to-end UgCS SDK walkthrough: connects to the UCS server, authorizes and
/// logs in, then demonstrates vehicle locking, Click&amp;Go, mission/route import
/// and update, object listing, payload and joystick control, telemetry retrieval,
/// and several event subscriptions. Blocks on ReadKey, then tears everything down.
/// </summary>
static void Main(string[] args)
{
    //Connect
    TcpClient tcpClient = new TcpClient("localhost", 3334);
    MessageSender messageSender = new MessageSender(tcpClient.Session);
    MessageReceiver messageReceiver = new MessageReceiver(tcpClient.Session);
    MessageExecutor messageExecutor = new MessageExecutor(messageSender, messageReceiver, new InstantTaskScheduler());
    messageExecutor.Configuration.DefaultTimeout = 10000;
    var notificationListener = new NotificationListener();
    // -1 subscribes the listener to all notification subscription ids.
    messageReceiver.AddListener(-1, notificationListener);

    //auth
    AuthorizeHciRequest request = new AuthorizeHciRequest();
    request.ClientId = -1;
    request.Locale = "en-US";
    var future = messageExecutor.Submit<AuthorizeHciResponse>(request);
    future.Wait();
    AuthorizeHciResponse AuthorizeHciResponse = future.Value;
    // clientId is attached to every subsequent request in this session.
    int clientId = AuthorizeHciResponse.ClientId;
    System.Console.WriteLine("AuthorizeHciResponse precessed");

    //login
    LoginRequest loginRequest = new LoginRequest();
    loginRequest.UserLogin = "******";
    loginRequest.UserPassword = "******";
    loginRequest.ClientId = clientId;
    var loginResponcetask = messageExecutor.Submit<LoginResponse>(loginRequest);
    loginResponcetask.Wait();

    //Lock vehicle example
    AcquireLockRequest lockRequest = new AcquireLockRequest
    {
        ClientId = clientId,
        ObjectType = "Vehicle",
        ObjectId = 2
    };
    var resultLock = messageExecutor.Submit<AcquireLockResponse>(lockRequest);
    resultLock.Wait();

    // Click&Go example: fly-to-waypoint command.
    // NOTE(review): latitude/longitude/heading values here appear to be radians — confirm against SDK docs.
    var sendCommandRequest = new SendCommandRequest
    {
        ClientId = clientId,
        Command = new UGCS.Sdk.Protocol.Encoding.Command
        {
            Code = "waypoint",
            Subsystem = Subsystem.S_FLIGHT_CONTROLLER
        }
    };
    sendCommandRequest.Command.Arguments.AddRange(new CommandArgument[]
    {
        new CommandArgument { Code = "latitude", Value = new Value { DoubleValue = 0.994445232147517 } },
        new CommandArgument { Code = "longitude", Value = new Value { DoubleValue = 0.4201742565140717 } },
        new CommandArgument { Code = "altitude_agl", Value = new Value { DoubleValue = 5.0 } },
        new CommandArgument { Code = "ground_speed", Value = new Value { DoubleValue = 5.0 } },
        new CommandArgument { Code = "heading", Value = new Value { DoubleValue = 0.017453292519943295 } }
    });
    sendCommandRequest.Vehicles.Add(new Vehicle { Id = 2 });
    var sendCommandResponse = messageExecutor.Submit<SendCommandResponse>(sendCommandRequest);
    sendCommandResponse.Wait();
    System.Console.WriteLine("Click&Go command sent");

    //Import mission
    var byteArray = File.ReadAllBytes("Demo mission.xml");
    ImportMissionFromXmlRequest importMissionRequest = new ImportMissionFromXmlRequest()
    {
        ClientId = clientId,
        MissionXml = byteArray
    };
    var importMissionResponse = messageExecutor.Submit<ImportMissionFromXmlResponse>(importMissionRequest);
    importMissionResponse.Wait();
    //mission contains imported mission from Demo mission.xml
    var mission = importMissionResponse.Value.Mission;
    System.Console.WriteLine("Demo mission.xml imported to UCS with name '{0}'", mission.Name);

    //Get mission from server
    GetObjectRequest getMissionObjectRequest = new GetObjectRequest()
    {
        ClientId = clientId,
        ObjectType = "Mission",
        ObjectId = mission.Id,
        RefreshDependencies = true
    };
    var getMissionObjectResponse = messageExecutor.Submit<GetObjectResponse>(getMissionObjectRequest);
    getMissionObjectResponse.Wait();
    //missionFromUcs contains retrieved mission
    var missionFromUcs = getMissionObjectResponse.Value.Object.Mission;
    System.Console.WriteLine("mission id '{0}' retrieved from UCS with name '{1}'", mission.Id, missionFromUcs.Name);

    //Import route
    var byteArrayRoute = File.ReadAllBytes("Demo route for Copter.xml");
    ImportRouteRequest importRouteRequest = new ImportRouteRequest()
    {
        ClientId = clientId,
        RouteData = byteArrayRoute,
        Filename = "Demo route for Copter.xml"
    };
    var importRouteResponse = messageExecutor.Submit<ImportRouteResponse>(importRouteRequest);
    importRouteResponse.Wait();
    //importedRoute contains imported route from Demo route for Copter.xml
    var importedRoute = importRouteResponse.Value.Route;
    System.Console.WriteLine("Demo route for Copter.xml imported to UCS with name '{0}'", importedRoute.Name);

    //Add vehicle profile to route
    GetObjectRequest requestVehicle = new GetObjectRequest()
    {
        ClientId = clientId,
        ObjectType = "Vehicle",
        ObjectId = 1, //EMU-COPTER-17
        RefreshDependencies = true
    };
    var responseVehicle = messageExecutor.Submit<GetObjectResponse>(requestVehicle);
    responseVehicle.Wait();
    importedRoute.VehicleProfile = responseVehicle.Value.Object.Vehicle.Profile;

    //Add route to mission
    importedRoute.Mission = missionFromUcs;

    //Save route on server
    CreateOrUpdateObjectRequest routeSaveRequest = new CreateOrUpdateObjectRequest()
    {
        ClientId = clientId,
        Object = new DomainObjectWrapper().Put(importedRoute, "Route"),
        WithComposites = true,
        ObjectType = "Route",
        AcquireLock = false
    };
    var updateRouteTask = messageExecutor.Submit<CreateOrUpdateObjectResponse>(routeSaveRequest);
    updateRouteTask.Wait();
    System.Console.WriteLine("route '{0}' added to mission '{1}'", updateRouteTask.Value.Object.Route.Name, missionFromUcs.Name);

    //Get route from server
    GetObjectRequest getRouteObjectRequest = new GetObjectRequest()
    {
        ClientId = clientId,
        ObjectType = "Route",
        ObjectId = updateRouteTask.Value.Object.Route.Id,
        RefreshDependencies = true
    };
    var geRouteObjectResponse = messageExecutor.Submit<GetObjectResponse>(getRouteObjectRequest);
    geRouteObjectResponse.Wait();
    //routeFromUcs contains retrieved route
    var routeFromUcs = geRouteObjectResponse.Value.Object.Route;
    System.Console.WriteLine(string.Format("route id '{0}' retrieved from UCS with name '{1}'", updateRouteTask.Value.Object.Route.Id, routeFromUcs.Name));

    //add action to route
    ActionDefinition actionDefenition = new ActionDefinition();
    actionDefenition.HeadingDefinition = new HeadingDefinition();
    actionDefenition.HeadingDefinition.Heading = 1.57079633; // 90 degrees
    actionDefenition.HeadingDefinition.RelativeToNorth = true;
    // Only attach when the route has enough segments for index 1 to be meaningful.
    if (routeFromUcs.Segments.Count > 2)
    {
        routeFromUcs.Segments[1].ActionDefinitions.Add(actionDefenition);
    }
    System.Console.WriteLine(string.Format("action to route '{0}'", routeFromUcs.Name));

    //save route
    CreateOrUpdateObjectRequest createOrUpdateRouteRequest = new CreateOrUpdateObjectRequest()
    {
        ClientId = clientId,
        Object = new DomainObjectWrapper().Put(routeFromUcs, "Route"),
        WithComposites = true,
        ObjectType = "Route",
        AcquireLock = false
    };
    var createOrUpdateRouteResponseTask = messageExecutor.Submit<CreateOrUpdateObjectResponse>(createOrUpdateRouteRequest);
    createOrUpdateRouteResponseTask.Wait();
    if (createOrUpdateRouteResponseTask.Value != null)
    {
        System.Console.WriteLine(string.Format("'{0}' route updated on UCS", routeFromUcs.Name));
    }
    else
    {
        System.Console.WriteLine(string.Format("fail to update route '{0}' on UCS", routeFromUcs.Name));
    }

    //Get all vehicles
    GetObjectListRequest getObjectListRequest = new GetObjectListRequest()
    {
        ClientId = clientId,
        ObjectType = "Vehicle",
        RefreshDependencies = true
    };
    // Exclude heavyweight dependencies from the refresh.
    getObjectListRequest.RefreshExcludes.Add("PayloadProfile");
    getObjectListRequest.RefreshExcludes.Add("Route");
    var task = messageExecutor.Submit<GetObjectListResponse>(getObjectListRequest);
    task.Wait();
    var list = task.Value;
    foreach (var v in list.Objects)
    {
        System.Console.WriteLine(string.Format("name: {0}; id: {1}; type: {2}", v.Vehicle.Name, v.Vehicle.Id, v.Vehicle.Type.ToString()));
    }
    // NOTE(review): FirstOrDefault() will throw NullReferenceException here if no vehicles exist.
    Vehicle vehicle = task.Value.Objects.FirstOrDefault().Vehicle;

    // Payload control
    SendCommandRequest requestPaload = new SendCommandRequest
    {
        ClientId = clientId,
        Command = new UGCS.Sdk.Protocol.Encoding.Command
        {
            Code = "direct_payload_control",
            Subsystem = Subsystem.S_GIMBAL,
            Silent = true,
            ResultIndifferent = true
        }
    };
    requestPaload.Vehicles.Add(new Vehicle() { Id = vehicle.Id });
    List<CommandArgument> listCommands = new List<CommandArgument>();
    listCommands.Add(new CommandArgument { Code = "roll", Value = new Value() { DoubleValue = 1 } });
    listCommands.Add(new CommandArgument { Code = "pitch", Value = new Value() { DoubleValue = 0 } });
    listCommands.Add(new CommandArgument { Code = "yaw", Value = new Value() { DoubleValue = 0 } });
    listCommands.Add(new CommandArgument { Code = "zoom", Value = new Value() { DoubleValue = 0 } });
    requestPaload.Command.Arguments.AddRange(listCommands);
    var resultPayload = messageExecutor.Submit<SendCommandResponse>(requestPaload);
    resultPayload.Wait();

    //update vehicle object
    CreateOrUpdateObjectRequest createOrUpdateObjectRequest = new CreateOrUpdateObjectRequest()
    {
        ClientId = clientId,
        Object = new DomainObjectWrapper().Put(vehicle, "Vehicle"),
        WithComposites = true,
        ObjectType = "Vehicle",
        AcquireLock = false
    };
    var createOrUpdateObjectResponseTask = messageExecutor.Submit<CreateOrUpdateObjectResponse>(createOrUpdateObjectRequest);
    createOrUpdateObjectResponseTask.Wait();

    //Vehicle notification subscription
    var eventSubscriptionWrapper = new EventSubscriptionWrapper();
    eventSubscriptionWrapper.ObjectModificationSubscription = new ObjectModificationSubscription();
    eventSubscriptionWrapper.ObjectModificationSubscription.ObjectId = vehicle.Id;
    eventSubscriptionWrapper.ObjectModificationSubscription.ObjectType = "Vehicle";
    SubscribeEventRequest requestEvent = new SubscribeEventRequest();
    requestEvent.ClientId = clientId;
    requestEvent.Subscription = eventSubscriptionWrapper;
    var responce = messageExecutor.Submit<SubscribeEventResponse>(requestEvent);
    responce.Wait();
    var subscribeEventResponse = responce.Value;
    SubscriptionToken st = new SubscriptionToken(subscribeEventResponse.SubscriptionId, (
        (notification) =>
        {
            //Vehicle notification
        }
    ), eventSubscriptionWrapper);
    notificationListener.AddSubscription(st);

    // Get Telemetry for vehicle
    DateTime utcTime = DateTime.Now.ToUniversalTime();
    DateTime posixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
    TimeSpan span = utcTime - posixEpoch;
    // FromTime is a POSIX timestamp in milliseconds.
    var beginningMilliseconds = (long)span.TotalMilliseconds;
    GetTelemetryRequest telemetryRequest = new GetTelemetryRequest
    {
        ClientId = clientId,
        FromTime = beginningMilliseconds,
        Limit = 10,
        Vehicle = new Vehicle() { Id = 1 }
    };
    var responseTelemetry = messageExecutor.Submit<GetTelemetryResponse>(telemetryRequest);
    responseTelemetry.Wait();

    //Go to manual mode
    SendCommandRequest manualModeCommand = new SendCommandRequest
    {
        ClientId = clientId,
        Command = new UGCS.Sdk.Protocol.Encoding.Command
        {
            Code = "manual",
            Subsystem = Subsystem.S_FLIGHT_CONTROLLER,
            Silent = false,
            ResultIndifferent = false
        }
    };
    manualModeCommand.Vehicles.Add(new Vehicle() { Id = 2 });
    var manualMode = messageExecutor.Submit<SendCommandResponse>(manualModeCommand);
    manualMode.Wait();

    //Go to joystick mode
    SendCommandRequest joystickModeCommand = new SendCommandRequest
    {
        ClientId = clientId,
        Command = new UGCS.Sdk.Protocol.Encoding.Command
        {
            Code = "joystick",
            Subsystem = Subsystem.S_FLIGHT_CONTROLLER,
            Silent = false,
            ResultIndifferent = false
        }
    };
    joystickModeCommand.Vehicles.Add(new Vehicle() { Id = 2 });
    var joystickMode = messageExecutor.Submit<SendCommandResponse>(joystickModeCommand);
    joystickMode.Wait();

    // Vehicle control in joystick mode
    SendCommandRequest vehicleJoystickControl = new SendCommandRequest
    {
        ClientId = clientId,
        Command = new UGCS.Sdk.Protocol.Encoding.Command
        {
            Code = "direct_vehicle_control",
            Subsystem = Subsystem.S_FLIGHT_CONTROLLER,
            Silent = true,
            ResultIndifferent = true
        }
    };
    vehicleJoystickControl.Vehicles.Add(new Vehicle() { Id = 2 });

    //List of current joystick values to send to vehicle.
    List<CommandArgument> listJoystickCommands = new List<CommandArgument>();
    listJoystickCommands.Add(new CommandArgument { Code = "roll", Value = new Value() { DoubleValue = 0 } });
    listJoystickCommands.Add(new CommandArgument { Code = "pitch", Value = new Value() { DoubleValue = 0 } });
    listJoystickCommands.Add(new CommandArgument { Code = "yaw", Value = new Value() { DoubleValue = 0 } });
    listJoystickCommands.Add(new CommandArgument { Code = "throttle", Value = new Value() { DoubleValue = 1 } });
    vehicleJoystickControl.Command.Arguments.AddRange(listJoystickCommands);

    for (int i = 1; i < 11; i++)
    {
        var sendJoystickCommandResponse = messageExecutor.Submit<SendCommandResponse>(vehicleJoystickControl);
        // NOTE(review): this waits on resultPayload (the earlier gimbal command),
        // not on sendJoystickCommandResponse — almost certainly a copy-paste bug.
        resultPayload.Wait();
        System.Console.WriteLine("Joystick command to go UP {0}", i);
        Thread.Sleep(1000);
    }

    //TelemetrySubscription
    var telemetrySubscriptionWrapper = new EventSubscriptionWrapper();
    telemetrySubscriptionWrapper.TelemetrySubscription = new TelemetrySubscription();
    SubscribeEventRequest requestTelemetryEvent = new SubscribeEventRequest();
    requestTelemetryEvent.ClientId = clientId;
    requestTelemetryEvent.Subscription = telemetrySubscriptionWrapper;
    var responceTelemetry = messageExecutor.Submit<SubscribeEventResponse>(requestTelemetryEvent);
    responceTelemetry.Wait();
    var subscribeEventResponseTelemetry = responceTelemetry.Value;
    SubscriptionToken stTelemetry = new SubscriptionToken(subscribeEventResponseTelemetry.SubscriptionId, (
        (notification) =>
        {
            foreach (var t in notification.Event.TelemetryEvent.Telemetry)
            {
                System.Console.WriteLine("Vehicle id: {0} Type: {1} Value {2}", t.Vehicle.Id, t.Type.ToString(), t.Value);
            }
        }
    ), telemetrySubscriptionWrapper);
    notificationListener.AddSubscription(stTelemetry);

    //Log notification subscription
    var logSubscriptionWrapper = new EventSubscriptionWrapper();
    logSubscriptionWrapper.ObjectModificationSubscription = new ObjectModificationSubscription();
    logSubscriptionWrapper.ObjectModificationSubscription.ObjectType = "VehicleLogEntry";
    SubscribeEventRequest requestLogEvent = new SubscribeEventRequest();
    requestLogEvent.ClientId = clientId;
    requestLogEvent.Subscription = logSubscriptionWrapper;
    var responceLog = messageExecutor.Submit<SubscribeEventResponse>(requestLogEvent);
    // NOTE(review): .Value is read without a responceLog.Wait() here, unlike every
    // other Submit in this method — possible race if Value blocks/returns default.
    var subscribeEventResponseLog = responceLog.Value;
    SubscriptionToken stLog = new SubscriptionToken(subscribeEventResponseLog.SubscriptionId, (
        (notification) =>
        {
            var eventType = notification.Event.ObjectModificationEvent.ModificationType;
            var eventLog = notification.Event.ObjectModificationEvent.Object.VehicleLogEntry;
            if (eventType == ModificationType.MT_CREATE)
            {
                // Log timestamps are POSIX milliseconds; convert to local time for display.
                DateTime start = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
                DateTime date = start.AddMilliseconds(eventLog.Time).ToLocalTime();
                var command = eventLog.CommandArguments != null ? eventLog.CommandArguments.CommandCode : string.Empty;
                System.Console.WriteLine("LOG: {0} Vehicle id: {1} Command: {2} Message: {3}", date.ToString("HH:mm:ss"), eventLog.Vehicle.Id, command, eventLog.Message);
            }
        }), logSubscriptionWrapper);
    notificationListener.AddSubscription(stLog);

    //Object notification subscription, subscribe for mission changed
    var missionObjectSubscriptionWrapper = new EventSubscriptionWrapper();
    missionObjectSubscriptionWrapper.ObjectModificationSubscription = new ObjectModificationSubscription();
    missionObjectSubscriptionWrapper.ObjectModificationSubscription.ObjectType = "Mission";
    SubscribeEventRequest requestMissionEvent = new SubscribeEventRequest();
    requestMissionEvent.ClientId = clientId;
    requestMissionEvent.Subscription = missionObjectSubscriptionWrapper;
    var responceMission = messageExecutor.Submit<SubscribeEventResponse>(requestMissionEvent);
    // NOTE(review): same missing Wait() as the log subscription above.
    var subscribeEventResponseMission = responceMission.Value;
    SubscriptionToken stMission = new SubscriptionToken(subscribeEventResponseMission.SubscriptionId, (
        (notification) =>
        {
            var eventType = notification.Event.ObjectModificationEvent.ModificationType;
            var missionObject = notification.Event.ObjectModificationEvent.Object.Mission;
            if (eventType == ModificationType.MT_UPDATE)
            {
                System.Console.WriteLine("Mission id: {0} updated", missionObject.Id);
            }
        }), missionObjectSubscriptionWrapper);
    notificationListener.AddSubscription(stMission);

    // Keep the process alive until a key is pressed, then tear down in order.
    System.Console.ReadKey();
    tcpClient.Close();
    messageSender.Cancel();
    messageReceiver.Cancel();
    messageExecutor.Close();
    notificationListener.Dispose();
}
public void S3ObjectLambdaBucketFieldInputTests(string bucketFieldInput, string clientRegion, string additionalFlags, string useArnRegion, string endpointUrl, string expectedEndpoint) { Console.WriteLine(string.Join(" | ", bucketFieldInput, clientRegion, additionalFlags, useArnRegion, endpointUrl, expectedEndpoint)); Console.WriteLine(); // ARRANGE // expectedEndpoint can be overloaded with the expected error message var expectSuccess = expectedEndpoint.Contains("amazonaws.com") || expectedEndpoint.Contains("my-endpoint.com"); // outputs to assert against: IRequest s3Request = null; Exception exception = null; var request = new GetObjectRequest { BucketName = bucketFieldInput, Key = "foo.txt" }; var config = new AmazonS3Config { UseArnRegion = useArnRegion == "" ? false : bool.Parse(useArnRegion), RegionEndpoint = clientRegion == "" ? null : RegionEndpoint.GetBySystemName(clientRegion) }; if (!string.IsNullOrWhiteSpace(endpointUrl)) { if (!endpointUrl.ToLower().StartsWith("https://")) { endpointUrl = $"https://{endpointUrl}"; } config.ServiceURL = endpointUrl; } if (additionalFlags.Contains("dualstack")) { config.UseDualstackEndpoint = true; } if (additionalFlags.Contains("fips")) { config.UseFIPSEndpoint = true; } if (additionalFlags.Contains("accelerate")) { config.UseAccelerateEndpoint = true; } // ACT try { s3Request = S3ArnTestUtils.RunMockRequest(request, GetObjectRequestMarshaller.Instance, config); Console.WriteLine(s3Request.Endpoint.ToString()); Console.WriteLine(); } catch (Exception e) { exception = e; } // ASSERT if (expectSuccess) { Assert.IsNull(exception, exception?.Message); Assert.IsNotNull(s3Request); AssertExtensions.UrlSuffixMatches(expectedEndpoint, s3Request.Endpoint); } else { Assert.IsNull(s3Request, s3Request?.Endpoint.ToString()); Assert.IsNotNull(exception); // reminder, expectedEndpoint also contains expected error message. AssertExtensions.AssertAreSameWithEmbellishments(expectedEndpoint, exception.Message); } }
/// <summary>
/// Lambda entry point: streams a JSON file of personal records from S3, pushes each row
/// through a map/filter pipeline (project every field to an anonymous object, keep records
/// with eyeColor == "green" and age > 15), counts the surviving rows, and returns the
/// count as a JSON API Gateway response.
/// </summary>
/// <param name="apigProxyEvent">Incoming API Gateway proxy event (not inspected by this handler).</param>
/// <param name="context">Lambda execution context (unused).</param>
/// <returns>200 response whose JSON body reports "Rows Processed".</returns>
public async Task<APIGatewayProxyResponse> FunctionHandler(APIGatewayProxyRequest apigProxyEvent, ILambdaContext context)
{
    int rowsProcessed = 0;
    // Projection stage: copies every field of Row into an anonymous object.
    Func<Row, object> mapFunction = value =>
    {
        var obj = new
        {
            _id = value._id, index = value.index, guid = value.guid, isActive = value.isActive,
            balance = value.balance, picture = value.picture, age = value.age, eyeColor = value.eyeColor,
            name = value.name, gender = value.gender, company = value.company, email = value.email,
            phone = value.phone, address = value.address, about = value.about, registered = value.registered,
            latitude = value.latitude, longitude = value.longitude, tags = value.tags, friends = value.friends,
            greeting = value.greeting, favoriteFruit = value.favoriteFruit
        };
        return (obj);
    };
    // Filter stages: green eyes, then age over 15.
    Func<dynamic, bool> filterPredicate1 = value => { return (value.eyeColor == "green"); };
    Func<dynamic, bool> filterPredicate2 = value => { return (value.age > 15); };
    // Compose map + filters into a single lazily-evaluated pipeline.
    Func<IEnumerable, IEnumerable> pipeline = Activities.pipelineMaker(
        Activities.mapMaker<Row, dynamic>(mapFunction),
        Activities.filterMaker(filterPredicate1),
        Activities.filterMaker(filterPredicate2)
    );
    // IEnumerable<Row> humans;
    try
    {
        // Source object is hard-coded; rows are read as a JSON array of Row records.
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "fyp-test-aws",
            Key = "random-personal-info5.json"
        };
        using (GetObjectResponse response = await S3Client.GetObjectAsync(request))
        using (Stream inStream = response.ResponseStream)
        using (StreamReader reader = new StreamReader(inStream))
        using (JsonReader r = new JsonTextReader(reader))
        {
            string title = response.Metadata["x-amz-meta-title"]; // Assume "title" metadata was added to the object.
            string contentType = response.Headers["Content-Type"];
            Console.WriteLine("Object metadata, Title: {0}", title);
            Console.WriteLine("Content type: {0}", contentType);
            // Pulls every top-level array element ("[0]", "[1]", ...) as a Row.
            IEnumerable<Row> iterator = r.SelectTokensWithRegex<Row>(new Regex(@"^\[\d+\]$"));
            foreach (var x in pipeline(iterator))
            {
                rowsProcessed++;
                // Log only every 4th surviving row to keep output manageable.
                if (rowsProcessed % 4 == 1)
                {
                    Console.WriteLine(x.ToString());
                }
            }
        }
    }
    catch (AmazonS3Exception e)
    {
        // If bucket or object does not exist
        Console.WriteLine("Error encountered ***. Message:'{0}' when reading object", e.Message);
    }
    catch (Exception e)
    {
        Console.WriteLine("Unknown encountered on server. Message:'{0}' when reading object", e.Message);
    }
    // using (JsonTextWriter wr = JsonReaderExtensions.InitJsonOutStream(OutStream))
    // {
    //     wr.WriteStartArray();
    //     foreach (var h in pipeline(humans))
    //     {
    //         // wr.SerialiseJsonToStream<dynamic>(h);
    //         Console.WriteLine(h);
    //     }
    //     wr.WriteEndArray();
    // }
    var body = new Dictionary<string, string>
    {
        { "Rows Processed", rowsProcessed.ToString() },
    };
    return (new APIGatewayProxyResponse
    {
        Body = JsonConvert.SerializeObject(body),
        StatusCode = 200,
        Headers = new Dictionary<string, string> { { "Content-Type", "application/json" } }
    });
}
/*
 * This class demonstrates the interleaving of search transactions: an outer search
 * finds listings changed since a given date, and for each result a nested search
 * plus a GetObject transaction fetch the listing detail and its photos.
 */
static void Main(string[] args)
{
    Options options = new Options();
    if (!options.Parse(args))
        Environment.Exit(1);
    RetsSession session = options.SessionFactory();
    try
    {
        if (!session.Login(options.user_name, options.user_password))
        {
            Console.WriteLine("Invalid login");
            Environment.Exit(1);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("RetsException: " + e);
        Environment.Exit(1);
    }
    RetsVersion version = session.GetDetectedRetsVersion();
    Console.WriteLine("RETS Version: " + ((version == RetsVersion.RETS_1_5) ? "1.5" : ((version == RetsVersion.RETS_1_7) ? "1.7" : "1.0")));
    /*
     * Find the key field for the resource.
     */
    RetsMetadata metadata = session.GetMetadata();
    MetadataResource metadataResource = metadata.GetResource(options.search_type);
    if (metadataResource == null)
    {
        Console.WriteLine("Invalid resource: " + options.search_type);
        session.Logout();
        Environment.Exit(1);
    }
    string keyField = metadataResource.GetKeyField();
    /*
     * Find the timestamp field if it is known (RETS 1.7 and later). If
     * not known, then the user must provide it.
     */
    MetadataClass metadataClass = metadata.GetClass(options.search_type, options.search_class);
    if (metadataClass == null)
    {
        Console.WriteLine("Invalid resource:class: " + options.search_type + ":" + options.search_class);
        session.Logout();
        Environment.Exit(2);
    }
    // NOTE(review): only auto-fills from metadata when the option is non-null AND empty;
    // assumes Options initializes classTimeStamp to "" — confirm before changing.
    if (options.classTimeStamp != null && options.classTimeStamp.Length == 0)
        options.classTimeStamp = metadataClass.GetStringAttribute("ClassTimeStamp");
    if (options.classTimeStamp == null || options.classTimeStamp.Length == 0)
    {
        Console.WriteLine("Class " + options.search_type + ":" + options.search_class + " has no ClassTimeStamp specified in the metadata.");
        // Fixed typo in the switch name ("--timetsamp" -> "--timestamp").
        Console.WriteLine("Please manually provide one using the --timestamp switch.");
        session.Logout();
        Environment.Exit(2);
    }
    /*
     * See if the last modified timestamp has been provided. If not, use yesterday.
     */
    if (options.lastModified == null || options.lastModified.Length == 0)
    {
        DateTime ts = DateTime.Now;
        options.lastModified = ts.AddDays(-1).ToString("yyyy-MM-dd");
    }
    /*
     * OK - let's find all listings that have changed since the lastModified date.
     * (Both operands are already strings; redundant ToString() calls removed.)
     */
    SearchRequest searchRequest = session.CreateSearchRequest(
        options.search_type,
        options.search_class,
        "(" + options.classTimeStamp + "=" + options.lastModified + "+)");
    searchRequest.SetSelect(keyField);
    searchRequest.SetLimit(SearchRequest.LIMIT_NONE);
    searchRequest.SetOffset(SearchRequest.OFFSET_NONE);
    searchRequest.SetCountType(SearchRequest.CountType.RECORD_COUNT_AND_RESULTS);
    searchRequest.SetStandardNames(false);
    /*
     * This starts the outer search.
     */
    SearchResultSet results = session.Search(searchRequest);
    Console.WriteLine("Record count: " + results.GetCount());
    Console.WriteLine();
    while (results.HasNext())
    {
        /*
         * Fetch the listing detail and media. This will cause a separate search transaction
         * to be open within the outer search transaction.
         */
        SearchRequest listingRequest = session.CreateSearchRequest(
            options.search_type,
            options.search_class,
            "(" + keyField + "=" + results.GetString(keyField) + ")");
        listingRequest.SetStandardNames(false);
        listingRequest.SetLimit(SearchRequest.LIMIT_DEFAULT);
        listingRequest.SetOffset(SearchRequest.OFFSET_NONE);
        listingRequest.SetCountType(SearchRequest.CountType.NO_RECORD_COUNT);
        listingRequest.SetFormatType(SearchRequest.FormatType.COMPACT);
        SearchResultSet listingResult = session.Search(listingRequest);
        IEnumerable columns = null;
        while (listingResult.HasNext())
        {
            // Column list is only available after the first row is fetched.
            if (columns == null)
            {
                columns = listingResult.GetColumns();
            }
            /*
             * Show the listing detail.
             */
            foreach (string column in columns)
            {
                Console.WriteLine("{0,15}: {1}", column, listingResult.GetString(column));
            }
            Console.WriteLine();
            /*
             * Now set up to fetch the objects associated with this listing.
             */
            GetObjectRequest getObjectRequest = new GetObjectRequest(options.search_type, "Photo");
            getObjectRequest.AddAllObjects(listingResult.GetString(keyField));
            GetObjectResponse getObjectResponse = session.GetObject(getObjectRequest);
            foreach (ObjectDescriptor objectDescriptor in getObjectResponse)
            {
                /*
                 * Report the object details.
                 */
                string objectKey = objectDescriptor.GetObjectKey();
                int objectId = objectDescriptor.GetObjectId();
                //string contentType = objectDescriptor.GetContentType();
                string description = objectDescriptor.GetDescription();
                Console.Write("Object " + objectKey + ":" + objectId.ToString());
                if (description.Length > 0)
                    Console.Write(", description: " + description);
                Console.WriteLine();
            }
            Console.WriteLine("=================");
        }
    }
    session.Logout();
}
/// <summary>
/// BackgroundWorker body: checks an Aliyun OSS bucket for a newer application version
/// (compared lexically against this.VersionID), downloads the newest archive into a local
/// temp directory, extracts it with RarFile, and reports progress/elapsed time throughout.
/// All user-facing strings are produced by the obfuscator helper GClass0.smethod_0 and are
/// preserved verbatim; their decoded meaning cannot be determined from this source.
/// </summary>
/// <param name="sender">BackgroundWorker (unused).</param>
/// <param name="e">Work event args (unused).</param>
private void bw_DoWork(object sender, DoWorkEventArgs e)
{
    alioss alioss = new alioss(SurveyMsg.OSSRegion, false, SurveyMsg.ProjectId);
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    // "Checking for updates....."
    this.bw.ReportProgress(20, "检查更新.....");
    OssClient ossClient = new OssClient(alioss.endpoint, alioss.accessId, alioss.accessKey);
    // flag: whether the expected update bucket exists.
    bool flag = false;
    try
    {
        using (IEnumerator<Bucket> enumerator = ossClient.ListBuckets().GetEnumerator())
        {
            while (enumerator.MoveNext())
            {
                if (enumerator.Current.Name == alioss.bucketNameUpdate)
                {
                    flag = true;
                    break;
                }
            }
        }
    }
    catch (Exception)
    {
        // Bucket listing failed (network/credentials): report, show a dialog, and abort.
        this.bw.ReportProgress(90, GClass0.smethod_0("朤臰跒悮指垨偠ܫ菱懅淡瑒狞"));
        MessageBox.Show(GClass0.smethod_0("朤臰跒悮指垨偠菱懅淡瑒狞"), GClass0.smethod_0("牌是擶暱"), MessageBoxButton.OK, MessageBoxImage.Asterisk);
        stopwatch.Stop();
        return;
    }
    if (!flag)
    {
        // Update bucket not found: report and abort.
        this.bw.ReportProgress(90, GClass0.smethod_0("孑冠攊冧剭壶规思"));
        MessageBox.Show(GClass0.smethod_0("孑冠攊冧剭壶规思"), GClass0.smethod_0("牌是擶暱"), MessageBoxButton.OK, MessageBoxImage.Asterisk);
        stopwatch.Stop();
        return;
    }
    // Derive a channel/prefix string from VersionID: either the part before the first
    // (obfuscated) marker character, or one of four fixed (obfuscated) channel names.
    string str = "";
    int num = this.VersionID.ToLower().IndexOf(GClass0.smethod_0("w"));
    if (num > -1)
    {
        str = this.VersionID.Substring(0, num);
    }
    else if (this.VersionID.IndexOf(GClass0.smethod_0("浈諗灉")) > -1)
    {
        str = GClass0.smethod_0("浈諗灉");
    }
    else if (this.VersionID.IndexOf(GClass0.smethod_0("漗砸灉")) > -1)
    {
        str = GClass0.smethod_0("漗砸灉");
    }
    else if (this.VersionID.IndexOf(GClass0.smethod_0("歠帍灉")) > -1)
    {
        str = GClass0.smethod_0("歠帍灉");
    }
    else if (this.VersionID.IndexOf(GClass0.smethod_0("辆厫灉")) > -1)
    {
        str = GClass0.smethod_0("辆厫灉");
    }
    // List update candidates under the channel prefix.
    ObjectListing objectListing = ossClient.ListObjects(alioss.bucketNameUpdate, alioss.bucketDirUpdate + GClass0.smethod_0(".") + str);
    if (objectListing.ObjectSummaries.Count<OssObjectSummary>() == 0)
    {
        // No update packages found: report and abort.
        this.bw.ReportProgress(90, GClass0.smethod_0("朣昁灏搪拱悴掄䧴"));
        MessageBox.Show(GClass0.smethod_0("朣昁灏搪拱悴掄䧴"), GClass0.smethod_0("牌是擶暱"), MessageBoxButton.OK, MessageBoxImage.Asterisk);
        stopwatch.Stop();
        return;
    }
    string text = "";
    string key = "";
    // text2/text3: current version (lowercased) and running maximum version seen so far;
    // text4: candidate version extracted from each object key.
    string text2 = this.VersionID.ToLower();
    string text3 = text2;
    string text4 = "";
    foreach (OssObjectSummary ossObjectSummary in objectListing.ObjectSummaries)
    {
        if (!(ossObjectSummary.Key == alioss.bucketDirUpdate + GClass0.smethod_0(".")))
        {
            text = ossObjectSummary.Key.Replace(alioss.bucketDirUpdate + GClass0.smethod_0("."), "");
            text4 = text.Replace(GClass0.smethod_0("*űɣͳ"), "").ToLower();
            key = ossObjectSummary.Key;
            // Lexical comparison: keep the largest version string.
            if (string.Compare(text4, text3) > 0)
            {
                text3 = text4;
            }
        }
    }
    if (text3 == text2)
    {
        // Already on the newest version: report and abort.
        this.bw.ReportProgress(90, GClass0.smethod_0("彙卄忺緈戩戅掴畋漮"));
        MessageBox.Show(GClass0.smethod_0("彙卄忺緈戩戅掴畋漮"), GClass0.smethod_0("牌是擶暱"), MessageBoxButton.OK, MessageBoxImage.Asterisk);
        stopwatch.Stop();
        return;
    }
    // text5: local temp directory for the downloaded archive.
    string text5 = Environment.CurrentDirectory + GClass0.smethod_0("VōɧͰѨթ٫ݢࡦढ़");
    if (!Directory.Exists(text5))
    {
        Directory.CreateDirectory(text5);
    }
    if (!Directory.Exists(text5))
    {
        // Could not create the temp directory: report and abort.
        this.bw.ReportProgress(90, GClass0.smethod_0("曱撴嬲踧ff"));
        MessageBox.Show(string.Concat(new string[]
        {
            GClass0.smethod_0("曤撿灆搡懺杋鄊躈續沀䓰刼䈉噛太"),
            Environment.NewLine,
            Environment.NewLine,
            GClass0.smethod_0("详嘸擻暾噀蓦蹇龎奁埲烌䗣䈎梃䃵嘻"),
            Environment.NewLine,
            text5
        }), GClass0.smethod_0("曰撳嬳踤"), MessageBoxButton.OK, MessageBoxImage.Asterisk);
        stopwatch.Stop();
        return;
    }
    RarFile rarFile = new RarFile();
    string path = text5 + text;
    // "Downloading {archive}..." progress message.
    this.bw.ReportProgress(40, string.Format(GClass0.smethod_0("丂蹵枀䷰Хտسݿࠡ"), text));
    // Stream the OSS object straight into the local file.
    FileStream fileStream = new FileStream(path, FileMode.Create, FileAccess.ReadWrite, FileShare.Read);
    GetObjectRequest getObjectRequest = new GetObjectRequest(alioss.bucketNameUpdate, key);
    ossClient.GetObject(getObjectRequest, fileStream);
    fileStream.Close();
    this.bw.ReportProgress(80, string.Format(GClass0.smethod_0("觪劃枀䷰Хտسݿࠡ"), text));
    // Extract the archive over the current directory.
    this.strRarFile = path;
    this.strRarOutputFolder = Environment.CurrentDirectory + GClass0.smethod_0("]");
    rarFile.Extract(this.strRarFile, this.strRarOutputFolder, this.strRarOutputFolder, this.strRarPassword);
    this.bw.ReportProgress(95, GClass0.smethod_0("牎昩擰暳䨸福") + text3);
    // Report elapsed time and finish.
    this.bw.ReportProgress(95, string.Format(GClass0.smethod_0("夊甋妀愛st蔞揾ܧࡽवଣ痐㴃"), stopwatch.Elapsed.TotalSeconds.ToString(GClass0.smethod_0("Dij"))));
    stopwatch.Stop();
}
/// <summary>
/// Backs up the configured database to a local temp .bak file (incremental when today's
/// file already exists, full otherwise), uploads it to S3, then deletes S3 archives older
/// than DaysToKeepS3BackupFor days. All failures are caught, logged, and deferred to the
/// next run.
/// </summary>
public Program()
{
    var usingTrustedConnection = string.IsNullOrEmpty(Username) && string.IsNullOrEmpty(Password);
    var sourceConnection = usingTrustedConnection ? new ServerConnection(ServerName) { LoginSecure = true } : new ServerConnection(ServerName, Username, Password);
    var sqlServer = new Server(sourceConnection);
    // NOTE(review): 'new' never returns null, so this guard is always true; kept as-is.
    if (sqlServer != null)
    {
        var backup = new Backup();
        var dbc = sqlServer.Databases;
        if (dbc.Contains(DatabaseName))
        {
            backup.Action = BackupActionType.Database;
            backup.Database = DatabaseName;
            // One backup file per UTC day, e.g. "MyDb-01-Jan-2020.bak".
            var dateFilename = DateTime.UtcNow.ToString("dd-MMM-yyyy");
            var tempFilename = String.Format("{0}-{1}.bak", DatabaseName, dateFilename);
            var tempBackupPath = String.Format("{0}{1}", TempFilePath, tempFilename);
            //remove old backups from this local temp location
            foreach (var file in Directory.GetFiles(TempFilePath))
            {
                if (file != tempBackupPath)
                {
                    Console.WriteLine("Removing previous temp backup " + file);
                    File.Delete(file);
                }
            }
            try
            {
                var backupDevice = new BackupDeviceItem(tempBackupPath, DeviceType.File);
                backup.Devices.Add(backupDevice);
                backup.Checksum = true;
                backup.ContinueAfterError = false;
                backup.LogTruncation = BackupTruncateLogType.Truncate;
                //if file exists then do an incremental, otherwise do a full
                if (File.Exists(tempBackupPath))
                {
                    backup.Incremental = true;
                }
                else
                {
                    backup.Incremental = false;
                }
                // Perform backup.
                backup.SqlBackup(sqlServer);
                //now move the backup to S3 - overwriting anything that is there with the same name
                var s3 = new S3Service { AccessKeyID = AccessKeyID, SecretAccessKey = SecretAccessKey };
                var bucket = Bucket;
                s3.AddObject(tempBackupPath, bucket, tempFilename);
                // Prune S3 objects whose LastModified is older than the retention window.
                // metadataOnly avoids downloading object bodies during the sweep.
                var metadataOnly = true;
                foreach (var listEntry in s3.ListObjects(Bucket, ""))
                {
                    var request = new GetObjectRequest(s3, Bucket, listEntry.Name, metadataOnly);
                    using (var response = request.GetResponse())
                    {
                        if (response.LastModified < DateTime.UtcNow.AddDays(DaysToKeepS3BackupFor * -1))
                        {
                            Console.WriteLine("Going to delete old archive " + listEntry.Name);
                            s3.DeleteObject(Bucket, listEntry.Name);
                        }
                    }
                }
                Console.Out.WriteLine("Backup to S3 is complete");
                System.Threading.Thread.Sleep(10000);
            }
            catch (Exception ee)
            {
                // Deliberate best-effort: swallow, log, and let the next scheduled run retry.
                Console.Out.WriteLine("Exception occurred - do not continue. Wait until next run to try again " + ee.ToString());
                System.Threading.Thread.Sleep(10000);
            }
        }
    }
}
/// <summary>
/// Creates a new image in S3 derived from an existing one: verifies the target key does not
/// already exist (via a metadata probe), downloads the source object, re-encodes it at the
/// requested quality/width, uploads the result plus a fixed 250px/quality-60 progressive
/// thumbnail, and returns the processing result and both URIs.
/// </summary>
/// <param name="folderId">Folder portion of both object keys.</param>
/// <param name="fileId">Id of the existing source image.</param>
/// <param name="newFileId">Id for the image to create; must not already exist.</param>
/// <param name="quality">Encoding quality for the main image.</param>
/// <param name="width">Target width for the main image.</param>
/// <param name="asProgressive">Whether to encode the main image as progressive.</param>
/// <returns>The image-processing result together with the new image and thumbnail URIs.</returns>
/// <exception cref="Exception">When the target blob already exists or the source blob cannot be read.</exception>
public async Task<CreateImageFromImageResult> CreateImageFromExistingImage(FolderIdType folderId, FileIdType fileId, FileIdType newFileId, int quality, int width, bool asProgressive = true)
{
    var fileName = $"{_prefix}/{folderId}/{fileId}";
    var newFileName = $"{_prefix}/{folderId}/{newFileId}";
    var newGetRequest = new GetObjectMetadataRequest { BucketName = _bucketName, Key = newFileName };
    try
    {
        // Probe for the target key; a successful call means it already exists.
        // (The response itself is not needed — only the absence of a NotFound error matters,
        // so the previously-unused local has been removed.)
        await _client.GetObjectMetadataAsync(newGetRequest);
        // NOTE(review): uses fileName, not newFileName — the message complains about the
        // new blob but reports the source key's URI; confirm intent before changing.
        var newUri = GetUri(fileName);
        throw new Exception($"The blob you want to create already exists - {newUri}!");
    }
    catch (AmazonS3Exception ex)
    {
        if (ex.ErrorCode == "NotFound")
        {
            var oldGetRequest = new GetObjectRequest { BucketName = _bucketName, Key = fileName };
            // Fix: dispose the GetObjectResponse (and its underlying network stream),
            // which the original code leaked.
            using (var oldResponse = await _client.GetObjectAsync(oldGetRequest))
            {
                var oldUri = GetUri(fileName);
                if (oldResponse.HttpStatusCode == System.Net.HttpStatusCode.OK)
                {
                    // Buffer the source bytes so they can be re-encoded twice.
                    byte[] bytes;
                    using (var ms = new MemoryStream())
                    {
                        await oldResponse.ResponseStream.CopyToAsync(ms);
                        ms.Position = 0;
                        bytes = ms.ToArray();
                    }
                    var imageResult = await _imageService.GetImage(bytes, width, quality, asProgressive);
                    var uri = await Upload(folderId, newFileId, imageResult.Bytes, oldResponse.Headers.ContentType);
                    // Thumbnail: fixed 250px wide, quality 60, always progressive.
                    var thumbNailImageResult = await _imageService.GetImage(bytes, 250, 60, true);
                    var thumbnailUri = await Upload(folderId, newFileId, thumbNailImageResult.Bytes, oldResponse.Headers.ContentType, true);
                    return new CreateImageFromImageResult { ImageProcessResult = imageResult, Uri = uri, ThumbnailUri = thumbnailUri };
                }
                throw new Exception($"The blob you want to copy doesn't exists - {oldUri}!");
            }
        }
        throw;
    }
}
/// <summary>
/// Asynchronously fetches an object, delivering the outcome through the supplied
/// success/failure callbacks.
/// </summary>
/// <param name="request">Prepared get-object request.</param>
/// <param name="successCallback">Invoked with the result on success.</param>
/// <param name="failCallback">Invoked on failure.</param>
public void GetObject(GetObjectRequest request, Callback.OnSuccessCallback<CosResult> successCallback, Callback.OnFailedCallback failCallback)
{
    // Hand the request and a fresh result holder to the internal scheduler.
    var result = new GetObjectResult();
    schedue(request, result, successCallback, failCallback);
}
/// <summary>
/// Builds a download task for the object addressed by the given request, identifying the
/// task by the request's bucket and key.
/// </summary>
/// <param name="request">The prepared GetObjectRequest this task will execute.</param>
public COSXMLDownloadTask(GetObjectRequest request) : base(request.Bucket, request.Key)
{
    this.getObjectRequest = request;
}
/// <summary>
/// Synchronous convenience wrapper over <c>IAmazonS3.GetObjectAsync</c>.
/// NOTE(review): this blocks on the async call via GetResult(), which is deadlock-prone
/// on synchronization-context frameworks — confirm call sites are context-free.
/// </summary>
/// <param name="client">S3 client to call.</param>
/// <param name="request">Get-object request to execute.</param>
/// <returns>The completed get-object response.</returns>
public static GetObjectResponse GetObject(this IAmazonS3 client, GetObjectRequest request) =>
    client.GetObjectAsync(request).GetResult();
/// <summary>
/// Demo of KMS-backed client-side encryption: looks up a CMK by alias, uploads the given
/// file through an AmazonS3EncryptionClient with SSE-KMS headers attached, then downloads
/// it again with a second encryption client and writes the decrypted bytes to a new local
/// file with a random suffix.
/// </summary>
/// <param name="filePath">Local path of the file to encrypt and upload; the object key is its file name.</param>
static void UploadFileWithClientSideEncryption(string filePath)
{
    string kmsKeyID = null;
    var objectKey = System.IO.Path.GetFileName(filePath);
    using (var kmsClient = new AmazonKeyManagementServiceClient(defaultEndpoint))
    {
        // var response = kmsClient.CreateKeyAsync(new CreateKeyRequest()).GetAwaiter().GetResult();
        kmsKeyID = GetKeyByAlias(keyName, kmsClient);
        // var keyMetadata = keyData?.KeyMetadata; // An object that contains information about the CMK created by this operation.
        var kmsEncryptionMaterials = new EncryptionMaterials(kmsKeyID);
        //set encryption context
        using (var s3Client = new AmazonS3EncryptionClient(defaultEndpoint, kmsEncryptionMaterials))
        {
            // encrypt and put object
            var putRequest = new PutObjectRequest
            {
                BucketName = bucketName,
                Key = objectKey,
                FilePath = filePath
            };
            putRequest.Metadata.Add("x-amz-meta-moo", "This is a test");
            // putRequest.Headers["x-amz-matdesc"] = System.Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(jsonStringEncryptionContext));
            // Request server-side KMS encryption in addition to the client-side layer.
            putRequest.Headers["x-amz-server-side-encryption"] = "aws:kms";
            putRequest.Headers["x-amz-server-side-encryption-aws-kms-key-id"] = kmsKeyID;
            putRequest.Headers["x-amz-server-side-encryption-context"] = System.Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(jsonStringEncryptionContext));
            // NOTE(review): sync-over-async blocking; acceptable in a console demo.
            s3Client.PutObjectAsync(putRequest).GetAwaiter().GetResult();
        }
        // The KeyID is actually embedded in the metadata of the object and the encryption client
        // automatically looks it up, so the materials passed here can carry a dummy key id.
        var kem2 = new EncryptionMaterials("1111111-11111-11111111-11111111");
        using (var s3Client2 = new AmazonS3EncryptionClient(defaultEndpoint, kem2))
        {
            // get object and decrypt
            var getRequest = new GetObjectRequest
            {
                BucketName = bucketName,
                Key = objectKey
            };
            // Output path: same directory/extension, random numeric suffix to avoid clobbering.
            string fPath2 = System.IO.Path.Combine(System.IO.Path.GetDirectoryName(filePath), System.IO.Path.GetFileNameWithoutExtension(filePath) + "_" + new Random().Next(0, 1000).ToString() + System.IO.Path.GetExtension(filePath));
            using (var getResponse = s3Client2.GetObjectAsync(getRequest).GetAwaiter().GetResult())
            using (var stream = getResponse.ResponseStream)
            using (var reader = new StreamReader(stream))
            {
                using (var fileStream = new FileStream(fPath2, FileMode.Create, FileAccess.Write))
                {
                    stream.CopyTo(fileStream);
                    fileStream.Flush();
                    fileStream.Close();
                }
            }
            Console.WriteLine($"Object written to {fPath2}");
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Fetches an object via the encryption client, translating a NoSuchKey error into a
/// test-setup failure with instructions to run the Java interop test first; any other
/// exception propagates unchanged.
/// </summary>
/// <param name="encryptionClient">Client used to fetch the object.</param>
/// <param name="request">Get-object request to execute.</param>
/// <returns>The successful get-object response.</returns>
private static GetObjectResponse WrapNoSuchKeyException(AmazonS3EncryptionClient encryptionClient, GetObjectRequest request)
{
    try
    {
        return encryptionClient.GetObject(request);
    }
    catch (Exception e)
    {
        // Only a missing-key S3 error gets the friendlier setup message.
        if (e is AmazonS3Exception s3Exception && NoSuchKeyErrorCode.Equals(s3Exception.ErrorCode))
        {
            throw new Exception("One or more of the objects expected to be written by the Java test are missing. " +
                                "Please run EncryptionInteropTest.java and then re-run this test.");
        }
        throw;
    }
}
/// <summary>
/// Deserializer for GetObject responses; stores the originating request and the service
/// client for later use when materializing the response body.
/// </summary>
/// <param name="getObjectRequest">The request that produced the response being deserialized.</param>
/// <param name="serviceClient">Client associated with the response content.</param>
public GetObjectResponseDeserializer(GetObjectRequest getObjectRequest, IServiceClient serviceClient) : base(null)
{
    _getObjectRequest = getObjectRequest;
    _serviceClient = serviceClient;
}
/// <inheritdoc/>
/// <remarks>Not supported by this implementation; always throws.</remarks>
public Task<GetObjectResponse> GetObjectAsync(GetObjectRequest request, CancellationToken cancellationToken = default(CancellationToken)) =>
    throw new NotImplementedException();
/// <summary>
/// Example: downloads a CloudFormation template from S3 and submits it as an UPDATE
/// change set against a stack name derived from the packaged template path, tagged and
/// parameterized for the development environment.
/// </summary>
static void ApplyCloudFormationChangeSetExample()
{
    string bucket_Name = QSS3BucketName;
    // Template key and stack name are both derived from the packaged template paths;
    // the stack name additionally strips dashes and the ".template" suffix.
    string templateName = QSS3KeyPrefix + TdwUtils.cfClassPathBastionChangeSet.Replace("tdw_cf_template\\", "");
    string stack_name = QSS3KeyPrefix + TdwUtils.cfClassPathBastion.Replace("tdw_cf_template\\", "");
    stack_name = stack_name.Replace("-", "");
    stack_name = stack_name.Replace(".template", "");
    AmazonS3Client s3Client = new AmazonS3Client();
    Amazon.CloudFormation.AmazonCloudFormationClient cfClient = new AmazonCloudFormationClient();
    GetObjectRequest getObjectRequest = new GetObjectRequest
    {
        BucketName = bucket_Name,
        Key = templateName,
    };
    // Download the template body as text.
    string data = null;
    using (GetObjectResponse getObjectResponse = s3Client.GetObject(getObjectRequest))
    {
        using (var stream = getObjectResponse.ResponseStream)
        using (var reader = new StreamReader(stream))
        {
            data = reader.ReadToEnd();
        }
    }
    List<string> CfCapabilities = new List<string>();
    CfCapabilities.Add("CAPABILITY_IAM");
    List<Amazon.CloudFormation.Model.Parameter> parameters = new List<Amazon.CloudFormation.Model.Parameter>();
    parameters.Add(new Parameter { ParameterKey = "pEnvTag", ParameterValue = "development" });
    // NOTE(review): built but unused (NotificationARNs is commented out below); the value
    // also looks malformed — "aws:sns:..." rather than "arn:aws:sns:..." — confirm.
    List<string> notificationArns = new List<string>();
    notificationArns.Add("aws:sns:eu-west-1:009837347446:tdwcftdevmainbastion-LoggingTemplate-1E3KD8XDHOSTY-rSecurityAlarmTopic-1TNN0GI7819UM");
    // NOTE(review): also built but unused (ResourceTypes commented out below).
    List<string> resourceTypes = new List<string>();
    resourceTypes.Add("AWS::*");
    List<Amazon.CloudFormation.Model.Tag> tagList = new List<Amazon.CloudFormation.Model.Tag>();
    tagList.Add(new Amazon.CloudFormation.Model.Tag() { Key = "environment", Value = "development" });
    CreateChangeSetRequest cfReq = new CreateChangeSetRequest()
    {
        Capabilities = CfCapabilities,
        ChangeSetName = "tdwv010001",
        ChangeSetType = ChangeSetType.UPDATE,
        ClientToken = "fromappsettingsv010001",
        Description = "Adding kinesis template to tdw stack and parameterizing env parameter",
        //NotificationARNs = notificationArns,
        Parameters = parameters,
        //ResourceTypes = resourceTypes,
        //RoleARN
        StackName = stack_name,
        Tags = tagList,
        TemplateBody = data
        //UsePreviousTemplate = true
    };
    CreateChangeSetResponse cfResp = cfClient.CreateChangeSet(cfReq);
}
/// <summary>
/// Factory hook: builds the deserializer that converts a raw service response into an
/// OssObject for the given request.
/// </summary>
/// <param name="request">The get-object request the response belongs to.</param>
/// <param name="client">Service client associated with the response.</param>
/// <returns>A deserializer producing an OssObject from the service response.</returns>
public IDeserializer<ServiceResponse, OssObject> CreateGetObjectResultDeserializer(GetObjectRequest request, IServiceClient client)
{
    var deserializer = new GetObjectResponseDeserializer(request, client);
    return deserializer;
}
/// <summary>
/// Downloads a CloudFormation template from S3, validates it, creates the derived stack if
/// it does not already exist, then lists all stacks with their status, creation time and
/// parameters.
/// </summary>
/// <param name="encryptionMaterials">Unused here; retained for the commented-out encryption-client variant.</param>
static void TestCfStack(EncryptionMaterials encryptionMaterials)
{
    string bucket_Name = QSS3BucketName;
    // Stack name is the template key with dashes and the ".template" suffix stripped.
    string templateName = QSS3KeyPrefix + TdwUtils.cfClassPathBastion.Replace("tdw_cf_template\\", "");
    string stack_name = templateName.Replace("-", "");
    stack_name = stack_name.Replace(".template", "");
    //AmazonS3EncryptionClient s3Client = new AmazonS3EncryptionClient(encryptionMaterials);
    AmazonS3Client s3Client = new AmazonS3Client();
    GetObjectRequest getObjectRequest = new GetObjectRequest
    {
        BucketName = bucket_Name,
        Key = templateName,
    };
    // Download the template body as text.
    string data = null;
    using (GetObjectResponse getObjectResponse = s3Client.GetObject(getObjectRequest))
    {
        using (var stream = getObjectResponse.ResponseStream)
        using (var reader = new StreamReader(stream))
        {
            data = reader.ReadToEnd();
        }
    }
    Amazon.CloudFormation.AmazonCloudFormationClient cfClient = new AmazonCloudFormationClient();
    // Validate the template and report its declared parameters.
    ValidateTemplateResponse templateResponse = cfClient.ValidateTemplate(new ValidateTemplateRequest() { TemplateBody = data });
    List<string> capabilities = templateResponse.Capabilities;
    string capabilitiesReason = templateResponse.CapabilitiesReason;
    string description = templateResponse.Description;
    List<TemplateParameter> parameters = templateResponse.Parameters;
    if (parameters.Any())
    {
        Console.WriteLine(" Parameters:");
        foreach (var p in parameters)
        {
            Console.WriteLine(" {0} = {1}", p.ParameterKey, p.Description);
        }
    }
    //try
    //{
    //    DeleteStackRequest deleteRequest = new DeleteStackRequest() { StackName = stack_name };
    //    cfClient.DeleteStack(deleteRequest);
    //}
    //catch (Exception ex)
    //{
    //    ex = null;
    //}
    // Probe for an existing stack; DescribeStacks throws when the stack is absent,
    // which is signalled here by a null response.
    DescribeStacksResponse testForStackDescResp = new DescribeStacksResponse();
    try
    {
        testForStackDescResp = cfClient.DescribeStacks(new DescribeStacksRequest() { StackName = stack_name });
    }
    catch (Exception ex)
    {
        testForStackDescResp = null;
    }
    if (testForStackDescResp == null)
    {
        // Stack does not exist yet: validate once more and create it.
        List<string> CfCapabilities = new List<string>();
        CfCapabilities.Add("CAPABILITY_IAM");
        CreateStackRequest stackRequest = new CreateStackRequest() { StackName = stack_name, TemplateBody = data, Capabilities = CfCapabilities };
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pDBPassword", ParameterValue = "LiverpoolFC" } );
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pNotifyEmail", ParameterValue = "*****@*****.**" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pEC2KeyPairBastion", ParameterValue = "BastionSshKvp" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pEC2KeyPair", ParameterValue = "Ec2SshKvp" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pSupportsConfig", ParameterValue = "Yes" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pAvailabilityZoneA", ParameterValue = "eu-west-1a" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pAvailabilityZoneB", ParameterValue = "eu-west-1b" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pVPCTenancy", ParameterValue = "default" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "QSS3BucketName", ParameterValue = QSS3BucketName });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "QSS3KeyPrefix", ParameterValue = QSS3KeyPrefix });
        templateResponse = cfClient.ValidateTemplate(new ValidateTemplateRequest() { TemplateBody = data });
        CreateStackResponse stackResponse = cfClient.CreateStack(stackRequest);
    }
    // List every stack (no StackName filter) with status, creation time and parameters.
    testForStackDescResp = cfClient.DescribeStacks(new DescribeStacksRequest());
    foreach (var stack in testForStackDescResp.Stacks)
    {
        Console.WriteLine("stack: {0}", stack.StackName);
        Console.WriteLine(" status: {0}", stack.StackStatus);
        Console.WriteLine(" created: {0}", stack.CreationTime);
        var ps = stack.Parameters;
        if (ps.Any())
        {
            Console.WriteLine(" parameters:");
            foreach (var p in ps)
            {
                Console.WriteLine(" {0} = {1}", p.ParameterKey, p.ParameterValue);
            }
        }
    }
}
/// <summary>
/// S3-triggered Lambda: reads a CSV of bowling throws from the event's bucket/key
/// (columns: player, throw1, throw2, throw3), computes per-player scores with strike/spare
/// look-ahead, and writes the totals back to the same bucket as "scores.csv".
/// </summary>
/// <param name="evnt">S3 event; only the first record is processed.</param>
/// <param name="context">Lambda context, used for logging.</param>
/// <returns>"Sucess" on completion, "No Events" when the event carries no S3 record.</returns>
public async Task<String> FunctionHandler(S3Event evnt, ILambdaContext context)
{
    // counter tracks frames seen for the current player (used for the 10-frame checks below).
    int counter = 0;
    string responseBody = "";
    var s3Event = evnt.Records?[0].S3;
    context.Logger.LogLine($"event:{s3Event.Bucket.Name}");
    if (s3Event == null)
    {
        return ("No Events");
    }
    try
    {
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = s3Event.Bucket.Name,
            Key = s3Event.Object.Key
        };
        using (GetObjectResponse response = await S3Client.GetObjectAsync(request))
        using (Stream responseStream = response.ResponseStream)
        using (StreamReader reader = new StreamReader(responseStream))
        {
            string output = "";
            responseBody = reader.ReadToEnd();
            // Now you process the response body.
            string[] lines = responseBody.Split('\n');
            Dictionary<string, dynamic> scoresList = new Dictionary<string, dynamic>();
            if (lines.Length == 0)
            {
                throw new InvalidOperationException("The file is empty");
            }
            // rows[i][0] = player, [1..3] = pins per throw. Row 0 is the header.
            var rows = lines.Select(l => l.Split(',').ToArray()).ToArray();
            for (int i = 1; i < lines.Length; i++)
            {
                // Pins for this frame and the next two (for strike/spare bonuses).
                int firstpins = 0, secondpins = 0, thirdpins = 0;
                firstpins = findFallenPins(rows[i][1], rows[i][2], rows[i][3]);
                if (i < (lines.Length - 1))
                {
                    secondpins = findFallenPins(rows[i + 1][1], rows[i + 1][2], rows[i + 1][3]);
                }
                if (i < (lines.Length - 2))
                {
                    thirdpins = findFallenPins(rows[i + 2][1], rows[i + 2][2], rows[i + 2][3]);
                }
                int score = 0;
                // Empty third column => regular frame; the branches below apply strike
                // (10 on first throw) and spare (10 across two throws) bonus rules,
                // with reduced look-ahead near the end of the file.
                if (string.IsNullOrWhiteSpace(rows[i][3]))
                {
                    if (i < (lines.Length - 2))
                    {
                        if (Convert.ToInt32(rows[i][1]) == 10)
                        {
                            // Strike: bonus depends on whether the frame two ahead belongs
                            // to the same player (double-strike case).
                            if (rows[i][0] == rows[i + 2][0])
                            {
                                if (Convert.ToInt32(rows[i + 1][1]) == 10)
                                {
                                    score = 10 + 10 + Convert.ToInt32(rows[i + 2][1]);
                                }
                                else if (Convert.ToInt32(rows[i + 1][1]) < 10)
                                {
                                    score = 10 + secondpins;
                                }
                            }
                            else
                            {
                                score = 10 + Convert.ToInt32(rows[i + 1][1]) + Convert.ToInt32(rows[i + 1][2]);
                            }
                        }
                        else if (firstpins == 10)
                        {
                            // Spare: next throw is the bonus.
                            score = firstpins + Convert.ToInt32(rows[i + 1][1]);
                        }
                        else
                        {
                            score = firstpins;
                        }
                    }
                    else if (i == (lines.Length - 2))
                    {
                        if (Convert.ToInt32(rows[i][1]) == 10)
                        {
                            score = 10 + Convert.ToInt32(rows[i + 1][1]) + Convert.ToInt32(rows[i + 1][2]);
                        }
                        else if (firstpins == 10)
                        {
                            score = 10 + Convert.ToInt32(rows[i + 1][1]);
                        }
                        else
                        {
                            score = firstpins;
                        }
                    }
                    else if (i == (lines.Length - 1))
                    {
                        // Last row: no look-ahead possible.
                        score = firstpins;
                    }
                }
                else if (!string.IsNullOrWhiteSpace(rows[i][3]))
                {
                    // Tenth frame with three throws: all three count directly.
                    score = Convert.ToInt32(rows[i][1]) + Convert.ToInt32(rows[i][2]) + Convert.ToInt32(rows[i][3]);
                }
                context.Logger.LogLine($"score:{score}");
                // Accumulate per-player totals; flag players with the wrong frame count.
                // NOTE(review): counter is incremented before the >= 10 check and reset on
                // every new player, and the "== 9 || == 0" test looks fragile — confirm the
                // intended 10-frame validation before relying on the error rows.
                dynamic val;
                if (scoresList.TryGetValue(rows[i][0], out val))
                {
                    context.Logger.LogLine($"Counter Value:{counter}");
                    counter++;
                    scoresList[rows[i][0]] = val + score;
                    if (counter >= 10)
                    {
                        context.Logger.LogLine($"Error:more than 10 games played");
                        scoresList[rows[i][0]] = "Error:more than 10 games played";
                    }
                    context.Logger.LogLine($"acore added:");
                }
                else
                {
                    if (counter == 9 || counter == 0)
                    {
                        scoresList.Add(rows[i][0], score);
                    }
                    else
                    {
                        context.Logger.LogLine($"Error:Less than 10 games played");
                        scoresList[rows[i - 1][0]] = "Error:Less than 10 games played";
                        scoresList.Add(rows[i][0], score);
                    }
                    counter = 0;
                }
            }
            // Emit "player,total" lines and upload as scores.csv to the same bucket.
            foreach (KeyValuePair<string, dynamic> kvp in scoresList)
            {
                output += kvp.Key + ',' + kvp.Value + '\n';
            }
            //context.Logger.LogLine($"{output}");
            PutObjectRequest request1 = new PutObjectRequest
            {
                BucketName = s3Event.Bucket.Name,
                Key = "scores.csv",
                ContentBody = output
            };
            var result = await S3Client.PutObjectAsync(request1);
            context.Logger.LogLine($"{output}");
            context.Logger.LogLine($"end");
            // NOTE(review): "Sucess" is a typo but is part of the returned payload;
            // left unchanged in case callers compare against it.
            return ("Sucess");
        }
    }
    catch (Exception e)
    {
        //context.Logger.LogLine($"Error getting object {s3Event.Object.Key} from bucket {s3Event.Bucket.Name}. Make sure they exist and your bucket is in the same region as this function.");
        context.Logger.LogLine(e.Message);
        context.Logger.LogLine(e.StackTrace);
        throw;
    }
}
/// <summary>
/// Gets a data stream for an existing object in S3. It is your responsibility to close
/// the Stream when you are finished.
/// </summary>
/// <param name="bucketName">Bucket containing the object.</param>
/// <param name="key">Object key.</param>
/// <param name="contentLength">Receives the object's content length.</param>
/// <param name="contentType">Receives the object's content type.</param>
/// <returns>An open stream over the object body; caller must close it.</returns>
public Stream GetObjectStream(string bucketName, string key, out long contentLength, out string contentType)
{
    var req = new GetObjectRequest(this, bucketName, key);
    var resp = req.GetResponse();

    // Surface the response metadata through the out parameters before handing back the stream.
    contentLength = resp.ContentLength;
    contentType = resp.ContentType;

    return resp.GetResponseStream();
}
/// <summary>
/// Backs up the configured database to a local temp .bak file (incremental when today's
/// file already exists, full otherwise), uploads it to S3, then deletes S3 archives older
/// than DaysToKeepS3BackupFor days. All failures are caught, logged, and deferred to the
/// next run. NOTE(review): this constructor duplicates another Program() earlier in this
/// file verbatim — consider consolidating.
/// </summary>
public Program()
{
    var usingTrustedConnection = string.IsNullOrEmpty(Username) && string.IsNullOrEmpty(Password);
    var sourceConnection = usingTrustedConnection ? new ServerConnection(ServerName) { LoginSecure = true } : new ServerConnection(ServerName, Username, Password);
    var sqlServer = new Server(sourceConnection);
    // NOTE(review): 'new' never returns null, so this guard is always true; kept as-is.
    if (sqlServer != null)
    {
        var backup = new Backup();
        var dbc = sqlServer.Databases;
        if (dbc.Contains(DatabaseName))
        {
            backup.Action = BackupActionType.Database;
            backup.Database = DatabaseName;
            // One backup file per UTC day, e.g. "MyDb-01-Jan-2020.bak".
            var dateFilename = DateTime.UtcNow.ToString("dd-MMM-yyyy");
            var tempFilename = String.Format("{0}-{1}.bak", DatabaseName, dateFilename);
            var tempBackupPath = String.Format("{0}{1}", TempFilePath, tempFilename);
            //remove old backups from this local temp location
            foreach (var file in Directory.GetFiles(TempFilePath))
            {
                if (file != tempBackupPath)
                {
                    Console.WriteLine("Removing previous temp backup " + file);
                    File.Delete(file);
                }
            }
            try
            {
                var backupDevice = new BackupDeviceItem(tempBackupPath, DeviceType.File);
                backup.Devices.Add(backupDevice);
                backup.Checksum = true;
                backup.ContinueAfterError = false;
                backup.LogTruncation = BackupTruncateLogType.Truncate;
                //if file exists then do an incremental, otherwise do a full
                if (File.Exists(tempBackupPath))
                {
                    backup.Incremental = true;
                }
                else
                {
                    backup.Incremental = false;
                }
                // Perform backup.
                backup.SqlBackup(sqlServer);
                //now move the backup to S3 - overwriting anything that is there with the same name
                var s3 = new S3Service { AccessKeyID = AccessKeyID, SecretAccessKey = SecretAccessKey };
                var bucket = Bucket;
                s3.AddObject(tempBackupPath, bucket, tempFilename);
                // Prune S3 objects whose LastModified is older than the retention window.
                // metadataOnly avoids downloading object bodies during the sweep.
                var metadataOnly = true;
                foreach (var listEntry in s3.ListObjects(Bucket, ""))
                {
                    var request = new GetObjectRequest(s3, Bucket, listEntry.Name, metadataOnly);
                    using (var response = request.GetResponse())
                    {
                        if (response.LastModified < DateTime.UtcNow.AddDays(DaysToKeepS3BackupFor * -1))
                        {
                            Console.WriteLine("Going to delete old archive " + listEntry.Name);
                            s3.DeleteObject(Bucket, listEntry.Name);
                        }
                    }
                }
                Console.Out.WriteLine("Backup to S3 is complete");
                System.Threading.Thread.Sleep(10000);
            }
            catch (Exception ee)
            {
                // Deliberate best-effort: swallow, log, and let the next scheduled run retry.
                Console.Out.WriteLine("Exception occurred - do not continue. Wait until next run to try again " + ee.ToString());
                System.Threading.Thread.Sleep(10000);
            }
        }
    }
}
/// <summary> /// Given an S3 Url, this method will return the last modified date or the file, or /// null if it doesn't exist. /// </summary> /// <returns>The last modified date</returns> /// <param name="fullS3Url">Full s3 URL.</param> public DateTime? LastModified(string fullS3Url) { var s3Service = Utilities.GetS3Service (); var parts = S3Parts.FromUrl (fullS3Url); if (!Exists (fullS3Url)) { return null; } var req = new GetObjectRequest (s3Service, parts.Bucket, parts.File, true); try { using (var response = req.GetResponse ()) { return response.LastModified; } } catch (Exception ex) { log.DebugFormat ( "LastModified - error when querying file {0}, err = {1}, " + "returning null", fullS3Url, ex.Message); return null; } }
/// <summary>
/// Fetches all photos for listing LN000001 from the demo RETS server and
/// writes each to a file named "&lt;objectKey&gt;-&lt;objectId&gt;.&lt;ext&gt;".
/// Pass "bytes" as the sole argument to use GetDataAsBytes() instead of
/// the streaming API.
/// </summary>
static void Main(string[] args)
{
    bool useStream = true;
    if ((args.Length == 1) && args[0].Equals("bytes"))
    {
        Console.WriteLine("Using GetDataAsBytes()");
        useStream = false;
    }

    RetsSession session = new RetsSession(
        "http://demo.crt.realtors.org:6103/rets/login");
    if (!session.Login("Joe", "Schmoe"))
    {
        Console.WriteLine("Invalid login");
        Environment.Exit(1);
    }

    GetObjectRequest request = new GetObjectRequest("Property", "Photo");
    request.AddAllObjects("LN000001");
    GetObjectResponse response = session.GetObject(request);

    // Content-type -> extension map. Loop-invariant, so build it once
    // (the original rebuilt it on every iteration).
    Hashtable extensions = new Hashtable();
    extensions["image/jpeg"] = "jpg";
    extensions["image/gif"] = "gif";
    extensions["text/xml"] = "xml";

    foreach (ObjectDescriptor objectDescriptor in response)
    {
        string objectKey = objectDescriptor.GetObjectKey();
        int objectId = objectDescriptor.GetObjectId();
        string contentType = objectDescriptor.GetContentType();
        string description = objectDescriptor.GetDescription();
        string location = objectDescriptor.GetLocationUrl();

        Console.Write(objectKey + " object #" + objectId);
        if (description.Length != 0)
            Console.Write(", description: " + description);
        if (location.Length != 0)
            Console.Write(", location: " + location);
        if (objectDescriptor.GetRetsReplyCode() != 0)
            Console.Write(", ***** " + objectDescriptor.GetRetsReplyCode()
                + ": " + objectDescriptor.GetRetsReplyText());
        Console.WriteLine();

        // Fall back to "bin" for unmapped content types rather than producing
        // a filename with a dangling dot ("key-1.").
        string extension = (string)extensions[contentType] ?? "bin";
        string outputFileName = objectKey + "-" + objectId + "." + extension;

        /*
         * Only save the object if there was no error and we're not using the
         * location=1 option.
         */
        if (objectDescriptor.GetRetsReplyCode() == 0 && location.Length == 0)
        {
            // using guarantees the file handle is released even if a read
            // fails mid-transfer (the original leaked it on exception).
            using (Stream outputStream = File.OpenWrite(outputFileName))
            {
                if (useStream)
                {
                    const int BUFFER_SIZE = 1024;
                    Stream stream = objectDescriptor.GetDataStream();
                    byte[] buffer = new Byte[BUFFER_SIZE];
                    int bytesRead;
                    while ((bytesRead = stream.Read(buffer, 0, BUFFER_SIZE)) > 0)
                    {
                        outputStream.Write(buffer, 0, bytesRead);
                    }
                }
                else
                {
                    // Write the bytes directly; the BinaryWriter wrapper added nothing.
                    byte[] data = objectDescriptor.GetDataAsBytes();
                    outputStream.Write(data, 0, data.Length);
                }
            }
        }
    }
    session.Logout();
}
public GetObjectResult GetObject(GetObjectRequest request) { return((Model.Object.GetObjectResult)excute(request, new Model.Object.GetObjectResult())); }
/// <summary>
/// Post-processes S3 responses: back-fills request identity onto GetObject
/// responses, wraps/unwraps MD5-validating streams around transfers, surfaces
/// partial DeleteObjects failures as exceptions, and patches up ListObjects
/// and multipart-upload responses. Called once per completed request.
/// </summary>
/// <param name="executionContext">Pipeline context holding the original request and its response.</param>
private static void ProcessResponseHandlers(IExecutionContext executionContext)
{
    AmazonWebServiceResponse response = executionContext.ResponseContext.Response;
    IRequest request = executionContext.RequestContext.Request;
    IWebResponseData webResponseData = executionContext.ResponseContext.HttpResponse;
    // SSE responses carry ETags that are not MD5 digests, so hash checks are skipped.
    bool isSse = HasSSEHeaders(webResponseData);

    var getObjectResponse = response as GetObjectResponse;
    if (getObjectResponse != null)
    {
        GetObjectRequest getObjectRequest = request.OriginalRequest as GetObjectRequest;
        // Copy request identity onto the response for caller convenience.
        getObjectResponse.BucketName = getObjectRequest.BucketName;
        getObjectResponse.Key = getObjectRequest.Key;

        // If ETag is present and is an MD5 hash (not a multi-part upload ETag), and no byte range is specified,
        // wrap the response stream in an MD5Stream.
        // If there is a customer encryption algorithm the etag is not an MD5.
        if (!string.IsNullOrEmpty(getObjectResponse.ETag)
            && !getObjectResponse.ETag.Contains("-")
            && !isSse
            && getObjectRequest.ByteRange == null)
        {
            string etag = getObjectResponse.ETag.Trim(etagTrimChars);
            byte[] expectedHash = AWSSDKUtils.HexStringToBytes(etag);
            // The wrapper verifies the MD5 of the content as the caller reads it.
            HashStream hashStream = new MD5Stream(getObjectResponse.ResponseStream, expectedHash, getObjectResponse.ContentLength);
            getObjectResponse.ResponseStream = hashStream;
        }
    }

    var deleteObjectsResponse = response as DeleteObjectsResponse;
    if (deleteObjectsResponse != null)
    {
        // A 200 DeleteObjects response can still contain per-key errors;
        // promote those to an exception so callers can't miss them.
        if (deleteObjectsResponse.DeleteErrors != null && deleteObjectsResponse.DeleteErrors.Count > 0)
        {
            throw new DeleteObjectsException(deleteObjectsResponse as DeleteObjectsResponse);
        }
    }

    var putObjectResponse = response as PutObjectResponse;
    var putObjectRequest = request.OriginalRequest as PutObjectRequest;
    if (putObjectRequest != null)
    {
        // If InputStream was a MD5Stream, compare calculated hash to returned etag
        MD5Stream hashStream = putObjectRequest.InputStream as MD5Stream;
        if (hashStream != null)
        {
            if (putObjectResponse != null && !isSse)
            {
                // Stream may not have been closed, so force calculation of hash
                hashStream.CalculateHash();
                CompareHashes(putObjectResponse.ETag, hashStream.CalculatedHash);
            }

            // Set InputStream to its original value
            putObjectRequest.InputStream = hashStream.GetNonWrapperBaseStream();
        }
    }

    var listObjectsResponse = response as ListObjectsResponse;
    if (listObjectsResponse != null)
    {
        // Synthesize NextMarker from the last key when the service omits it
        // on a truncated listing, so pagination can continue.
        if (listObjectsResponse.IsTruncated
            && string.IsNullOrEmpty(listObjectsResponse.NextMarker)
            && listObjectsResponse.S3Objects.Count > 0)
        {
            listObjectsResponse.NextMarker = listObjectsResponse.S3Objects.Last().Key;
        }
    }

    var uploadPartRequest = request.OriginalRequest as UploadPartRequest;
    var uploadPartResponse = response as UploadPartResponse;
    if (uploadPartRequest != null)
    {
        if (uploadPartResponse != null)
        {
            // Echo the part number back so callers can build the CompleteMultipartUpload list.
            uploadPartResponse.PartNumber = uploadPartRequest.PartNumber;
        }

        // If InputStream was a MD5Stream, compare calculated hash to returned etag
        MD5Stream hashStream = uploadPartRequest.InputStream as MD5Stream;
        if (hashStream != null)
        {
            if (uploadPartResponse != null && !isSse)
            {
                // Stream may not have been closed, so force calculation of hash
                hashStream.CalculateHash();
                CompareHashes(uploadPartResponse.ETag, hashStream.CalculatedHash);
            }

            // Set InputStream to its original value
            uploadPartRequest.InputStream = hashStream.GetNonWrapperBaseStream();
        }
    }

    var copyPartResponse = response as CopyPartResponse;
    if (copyPartResponse != null)
    {
        copyPartResponse.PartNumber = ((CopyPartRequest)request.OriginalRequest).PartNumber;
    }

    // Undo any other request mutations made earlier in the pipeline.
    AmazonS3Client.CleanupRequest(request.OriginalRequest);
}
internal GetObjectState(NetworkStream stream, KyruApplication app, GetObjectRequest getObjectRequest) { this.stream = stream; this.app = app; this.getObjectRequest = getObjectRequest; }
/// <summary> /// Returns true if the given object exists in the given bucket. /// </summary> public bool ObjectExists(string bucketName, string key) { var request = new GetObjectRequest(this, bucketName, key, true); // This is the recommended method from the S3 API docs. try { using (GetObjectResponse response = request.GetResponse()) return true; } catch (WebException exception) { var response = exception.Response as HttpWebResponse; if (response != null && response.StatusCode == HttpStatusCode.NotFound) return false; else throw; } }
/// <summary>
/// Downloads one DataProtection key object from S3 and parses it as XML.
/// Applies customer-provided SSE settings when configured, optionally verifies
/// an MD5 checksum taken from the ETag or custom metadata, and transparently
/// decompresses gzip-encoded content.
/// </summary>
/// <param name="item">S3 listing entry identifying the key object to fetch.</param>
/// <param name="throttler">Semaphore bounding concurrent downloads; acquired on entry, released in the finally block.</param>
/// <param name="ct">Cancellation token flowed to the semaphore wait and the S3 call.</param>
/// <returns>The parsed key element, or null for empty folder-placeholder keys.</returns>
/// <exception cref="InvalidOperationException">Streamed content's MD5 does not match the expected checksum.</exception>
private async Task<XElement> GetElementFromKey(S3Object item, SemaphoreSlim throttler, CancellationToken ct)
{
    await throttler.WaitAsync(ct);
    try
    {
        logger?.LogDebug("Retrieving DataProtection key at S3 location {0} in bucket {1}", item.Key, Config.Bucket);

        // Default to no customer SSE; overridden below when configured.
        var gr = new GetObjectRequest
        {
            BucketName = Config.Bucket,
            Key = item.Key,
            ServerSideEncryptionCustomerMethod = ServerSideEncryptionCustomerMethod.None
        };
        if (Config.ServerSideEncryptionCustomerMethod != null &&
            Config.ServerSideEncryptionCustomerMethod != ServerSideEncryptionCustomerMethod.None)
        {
            gr.ServerSideEncryptionCustomerMethod = Config.ServerSideEncryptionCustomerMethod;
            gr.ServerSideEncryptionCustomerProvidedKey = Config.ServerSideEncryptionCustomerProvidedKey;
            gr.ServerSideEncryptionCustomerProvidedKeyMD5 = Config.ServerSideEncryptionCustomerProvidedKeyMd5;
        }

        using (var response = await s3Client.GetObjectAsync(gr, ct).ConfigureAwait(false))
        {
            // Skip empty folder keys
            if (item.Key.EndsWith("/") && response.ContentLength == 0)
            {
                return (null);
            }

            // Look for checksum. If it's being checked and the ETag is suitable, trust that most of all (since S3 calculates this post-upload and
            // upload content is checked by Md5Digest), otherwise use MD5 metadata if configured. AWS SDK does the ETag check for us at time of writing, so usually
            // ValidateETag isn't needed.
            string headerChecksum = null;
            bool testChecksum = Config.ValidateETag && GetETagChecksum(response, out headerChecksum)
                || Config.ValidateMd5Metadata && GetHeaderChecksum(response, out headerChecksum);

            // When checking, hash the bytes as they stream through via CryptoStream;
            // md5 stays null (and the raw stream is used) when no check is needed.
            using (var md5 = testChecksum ? MD5.Create() : null)
            {
                XElement elementToReturn;
                using (var hashStream = testChecksum
                    ? new CryptoStream(response.ResponseStream, md5, CryptoStreamMode.Read)
                    : response.ResponseStream)
                {
                    // Stream returned from AWS SDK does not automatically uncompress even with Content-Encoding set
                    // Not that surprising considering that S3 treats the data as just N bytes; that it was compressed
                    // client-side doesn't really matter.
                    //
                    // Compatibility: If we set compress=true but load something without gzip encoding then skip and
                    // load as uncompressed. If we set compress=false but load something with gzip encoding, load as
                    // compressed otherwise loading won't work.
                    if (response.Headers.ContentEncoding == "gzip")
                    {
                        using (var responseStream = new GZipStream(hashStream, CompressionMode.Decompress))
                        {
                            elementToReturn = XElement.Load(responseStream);
                        }
                    }
                    else
                    {
                        elementToReturn = XElement.Load(hashStream);
                    }
                }

                // Hash is only final after the stream above is fully read and disposed.
                if (testChecksum)
                {
                    var md5Value = BitConverter.ToString(md5.Hash).Replace("-", "").ToLowerInvariant();
                    if (md5Value != headerChecksum)
                    {
                        throw new InvalidOperationException($"Streamed S3 data has MD5 of {md5Value} which does not match provided MD5 metadata {headerChecksum} - corruption in transit");
                    }
                }

                return (elementToReturn);
            }
        }
    }
    finally
    {
        // Always free the concurrency slot, even on failure or cancellation.
        throttler.Release();
    }
}