static void TestCfStack(EncryptionMaterials encryptionMaterials)
{
    string bucket_Name = QSS3BucketName;
    string templateName = QSS3KeyPrefix + TdwUtils.cfClassPathBastion.Replace("tdw_cf_template\\", "");
    string stack_name = templateName.Replace("-", "");
    stack_name = stack_name.Replace(".template", "");

    //AmazonS3EncryptionClient s3Client = new AmazonS3EncryptionClient(encryptionMaterials);
    AmazonS3Client s3Client = new AmazonS3Client();
    GetObjectRequest getObjectRequest = new GetObjectRequest
    {
        BucketName = bucket_Name,
        Key = templateName,
    };

    // Download the CloudFormation template body from S3
    string data = null;
    using (GetObjectResponse getObjectResponse = s3Client.GetObject(getObjectRequest))
    using (var stream = getObjectResponse.ResponseStream)
    using (var reader = new StreamReader(stream))
    {
        data = reader.ReadToEnd();
    }

    AmazonCloudFormationClient cfClient = new AmazonCloudFormationClient();

    // Validate the template and report its parameters
    ValidateTemplateResponse templateResponse = cfClient.ValidateTemplate(new ValidateTemplateRequest() { TemplateBody = data });
    List<string> capabilities = templateResponse.Capabilities;
    string capabilitiesReason = templateResponse.CapabilitiesReason;
    string description = templateResponse.Description;
    List<TemplateParameter> parameters = templateResponse.Parameters;
    if (parameters.Any())
    {
        Console.WriteLine(" Parameters:");
        foreach (var p in parameters)
        {
            Console.WriteLine(" {0} = {1}", p.ParameterKey, p.Description);
        }
    }

    //try
    //{
    //    DeleteStackRequest deleteRequest = new DeleteStackRequest() { StackName = stack_name };
    //    cfClient.DeleteStack(deleteRequest);
    //}
    //catch (Exception)
    //{
    //}

    // DescribeStacks throws if the stack does not exist; a null response here means "create it"
    DescribeStacksResponse testForStackDescResp = new DescribeStacksResponse();
    try
    {
        testForStackDescResp = cfClient.DescribeStacks(new DescribeStacksRequest() { StackName = stack_name });
    }
    catch (Exception)
    {
        testForStackDescResp = null;
    }

    if (testForStackDescResp == null)
    {
        List<string> CfCapabilities = new List<string>();
        CfCapabilities.Add("CAPABILITY_IAM");
        CreateStackRequest stackRequest = new CreateStackRequest()
        {
            StackName = stack_name,
            TemplateBody = data,
            Capabilities = CfCapabilities
        };
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pDBPassword", ParameterValue = "LiverpoolFC" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pNotifyEmail", ParameterValue = "*****@*****.**" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pEC2KeyPairBastion", ParameterValue = "BastionSshKvp" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pEC2KeyPair", ParameterValue = "Ec2SshKvp" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pSupportsConfig", ParameterValue = "Yes" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pAvailabilityZoneA", ParameterValue = "eu-west-1a" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pAvailabilityZoneB", ParameterValue = "eu-west-1b" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "pVPCTenancy", ParameterValue = "default" });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "QSS3BucketName", ParameterValue = QSS3BucketName });
        //stackRequest.Parameters.Add(new Parameter() { ParameterKey = "QSS3KeyPrefix", ParameterValue = QSS3KeyPrefix });

        templateResponse = cfClient.ValidateTemplate(new ValidateTemplateRequest() { TemplateBody = data });
        CreateStackResponse stackResponse = cfClient.CreateStack(stackRequest);
    }

    // List all stacks with their status and parameters
    testForStackDescResp = cfClient.DescribeStacks(new DescribeStacksRequest());
    foreach (var stack in testForStackDescResp.Stacks)
    {
        Console.WriteLine("stack: {0}", stack.StackName);
        Console.WriteLine(" status: {0}", stack.StackStatus);
        Console.WriteLine(" created: {0}", stack.CreationTime);
        var ps = stack.Parameters;
        if (ps.Any())
        {
            Console.WriteLine(" parameters:");
            foreach (var p in ps)
            {
                Console.WriteLine(" {0} = {1}", p.ParameterKey, p.ParameterValue);
            }
        }
    }
}
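CreateStack returns before the stack finishes building. A minimal sketch, in the same synchronous style as the method above, that polls DescribeStacks until the stack leaves CREATE_IN_PROGRESS (the 15-second interval is an arbitrary choice, not anything the original code prescribes):

// Sketch: block until stack creation settles; pass the same cfClient and stack_name as in TestCfStack.
static void WaitForStack(AmazonCloudFormationClient cfClient, string stackName)
{
    while (true)
    {
        var stack = cfClient.DescribeStacks(new DescribeStacksRequest { StackName = stackName }).Stacks[0];
        if (stack.StackStatus != StackStatus.CREATE_IN_PROGRESS)
        {
            // Terminal (or at least non-creating) state reached: CREATE_COMPLETE, ROLLBACK_*, etc.
            Console.WriteLine("final status: {0}", stack.StackStatus);
            return;
        }
        System.Threading.Thread.Sleep(TimeSpan.FromSeconds(15));
    }
}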
public ActionResult Create(project_photo project_photo, int projectid = 0, int returntoproject = 0)
{
    var project = db.projects.FirstOrDefault(x => x.ProjectID == project_photo.ProjectID && x.UserID == CurrentUser.UserID);
    if (project == null)
    {
        return RedirectToAction("Index", "Home");
    }

    //File Upload
    if (Request.Files != null && Request.Files.Count > 0)
    {
        foreach (string keyname in Request.Files)
        {
            HttpPostedFileBase file = Request.Files[keyname];
            if (file != null && file.ContentLength > 0 && !string.IsNullOrEmpty(file.FileName))
            {
                //file upload
                string ext = Path.GetExtension(file.FileName).ToLower();
                if (ext == ".png" || ext == ".jpg" || ext == ".jpeg" || ext == ".gif")
                {
                    try
                    {
                        using (Image tmp = Image.FromStream(file.InputStream))
                        {
                            //resize+crop
                            int width = int.Parse(ConfigurationManager.AppSettings["Image_Gallery_Width"]);
                            int height = int.Parse(ConfigurationManager.AppSettings["Image_Gallery_Height"]);
                            string name = getTimestamp() + ".jpg";
                            string filepath = string.Format("projects/{0}/photos/{1}", project_photo.ProjectID, name);
                            string address = ConfigurationManager.AppSettings["AWSS3BucketUrl"] + filepath;

                            //send
                            using (Image resized = tmp.GetResizedImage(width, height, true))
                            {
                                var request = new PutObjectRequest()
                                    .WithBucketName(ConfigurationManager.AppSettings["AWSS3Bucket"])
                                    .WithKey(filepath);
                                using (MemoryStream buffer = new MemoryStream())
                                {
                                    resized.Save(buffer, ImageHelper.GetJpgEncoder(), ImageHelper.GetJpgEncoderParameters(80));
                                    request.InputStream = buffer;
                                    AmazonS3Client s3Client = new AmazonS3Client();
                                    s3Client.PutObject(request);
                                }
                            }

                            ModelState.Remove(keyname);
                            ModelState.Add(keyname, new ModelState());
                            ModelState.SetModelValue(keyname, new ValueProviderResult(address, address, null));
                            project_photo.Url = address;
                        }
                    }
                    catch (Exception ex)
                    {
                        ModelState.AddModelError(keyname, "Upload error: " + ex.Message);
                    }
                }
                else
                {
                    ModelState.AddModelError(keyname, "Invalid file type");
                }
            }
        }
    }

    if (ModelState.IsValid)
    {
        project_photo.ProjectID = project.ProjectID;
        db.project_photo.AddObject(project_photo);
        db.SaveChanges();
        if (returntoproject > 0)
        {
            return RedirectToAction("Edit", "Projects", new { id = project_photo.ProjectID }, "photos");
        }
        return RedirectToAction("Index", "ProjectPhotos", new { Area = "Publisher", projectid = project.ProjectID });
    }

    ViewBag.returntoproject = returntoproject;
    ViewBag.Project = db.projects.FirstOrDefault(x => x.ProjectID == project_photo.ProjectID);
    return View(project_photo);
}
private async void button1_Click(object sender, EventArgs e)
{
    AmazonCognitoIdentityProviderClient provider =
        new AmazonCognitoIdentityProviderClient(new Amazon.Runtime.AnonymousAWSCredentials(), Amazon.RegionEndpoint.USEast2);
    String AccessToken = "";
    CognitoUserPool userPool = new CognitoUserPool("us-east-2_5823vgrxD", "7dpaqpkf9b5pjp9gg94k3gndld", provider);
    CognitoUser user = new CognitoUser("AppUser2", "7dpaqpkf9b5pjp9gg94k3gndld", userPool, provider);
    InitiateSrpAuthRequest authRequest = new InitiateSrpAuthRequest() { Password = "******" };
    AuthFlowResponse authResponse = await user.StartWithSrpAuthAsync(authRequest).ConfigureAwait(false);

    if (authResponse.ChallengeName == ChallengeNameType.NEW_PASSWORD_REQUIRED)
    {
        String NewPassword;
        frmEnterNewPassword f = new frmEnterNewPassword();
        if (f.ShowDialog() == DialogResult.OK)
        {
            NewPassword = f.Controls["txtPassword"].Text;
            authResponse = await user.RespondToNewPasswordRequiredAsync(new RespondToNewPasswordRequiredRequest()
            {
                SessionID = authResponse.SessionID,
                NewPassword = NewPassword
            });
            AccessToken = authResponse.AuthenticationResult.AccessToken;
        }
    }
    else
    {
        AccessToken = authResponse.AuthenticationResult.AccessToken;
    }

    CognitoAWSCredentials credentials = user.GetCognitoAWSCredentials("us-east-2:bd9037c3-26d6-432a-a6a9-9d66281f49ba", Amazon.RegionEndpoint.USEast2);
    // CognitoAWSCredentials credentials = new CognitoAWSCredentials("us-east-2:bd9037c3-26d6-432a-a6a9-9d66281f49ba", Amazon.RegionEndpoint.USEast2);
    // credentials.AddLogin("www.amazon.com", AccessToken);

    using (var client = new AmazonS3Client(credentials, Amazon.RegionEndpoint.USEast2))
    {
        //client.DeleteBucketAsync("Kalle");
        //client.ListVersionsAsync("ssdfsdf");
        ListBucketsResponse response = await client.ListBucketsAsync(new ListBucketsRequest()).ConfigureAwait(false);
        foreach (S3Bucket bucket in response.Buckets)
        {
            Console.WriteLine(bucket.BucketName);
        }
    }
}
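The commented-out AddLogin call above registers the access token under the "www.amazon.com" (Login with Amazon) provider, which is not how a user-pool login is wired to an identity pool. A hedged sketch of the manual login map, meant to replace those commented lines inside button1_Click; it reuses the pool/identity-pool IDs from the method, and the provider-name format and the use of the ID token (not the access token) follow the Cognito identity-pool convention:

// Sketch: manual login map for a user-pool-backed identity pool.
CognitoAWSCredentials manualCredentials = new CognitoAWSCredentials(
    "us-east-2:bd9037c3-26d6-432a-a6a9-9d66281f49ba", Amazon.RegionEndpoint.USEast2);
// The ID token is registered under the user pool's provider name.
manualCredentials.AddLogin(
    "cognito-idp.us-east-2.amazonaws.com/us-east-2_5823vgrxD",
    authResponse.AuthenticationResult.IdToken);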
public async Task DeleteAsync(IEnumerable<string> fullPaths, CancellationToken cancellationToken = default)
{
    AmazonS3Client client = await GetClientAsync().ConfigureAwait(false);
    // Issue one delete per path and wait for all of them to finish
    await Task.WhenAll(fullPaths.Select(fullPath => DeleteAsync(fullPath, client, cancellationToken))).ConfigureAwait(false);
}
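An unbounded Task.WhenAll over a large key list can run into S3 request throttling. A minimal sketch of a bounded variant, assuming the same GetClientAsync and per-object DeleteAsync helpers as above (the method name and maxConcurrency parameter are hypothetical):

// Sketch: cap concurrent deletes with a SemaphoreSlim.
public async Task DeleteThrottledAsync(IEnumerable<string> fullPaths,
    int maxConcurrency = 8, CancellationToken cancellationToken = default)
{
    AmazonS3Client client = await GetClientAsync().ConfigureAwait(false);
    using (var gate = new SemaphoreSlim(maxConcurrency))
    {
        var tasks = fullPaths.Select(async fullPath =>
        {
            await gate.WaitAsync(cancellationToken).ConfigureAwait(false);
            try
            {
                await DeleteAsync(fullPath, client, cancellationToken).ConfigureAwait(false);
            }
            finally
            {
                gate.Release();
            }
        }).ToList();
        await Task.WhenAll(tasks).ConfigureAwait(false);
    }
}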
public S3Tests()
{
    client = CreateAwsClient();
}
public S3Helper(string awsAccessKeyId, string awsSecretAccessKey, int maxDegreeOfParalelism = 2)
{
    _maxDegreeOfParalelism = maxDegreeOfParalelism;
    _S3Client = new AmazonS3Client(awsAccessKeyId, awsSecretAccessKey);
}
public static async Task SetupAsync(IWebApi webApi, string awsAccessKeyId, string awsSecretAccessKey, string awsTopicArn,
    string awsEndPoint, string awsBucketName, Func<string, string, string[], Task<string>> handler)
{
    if (!string.IsNullOrEmpty(awsEndPoint))
    {
        Uri endPointUri = new Uri(awsEndPoint);
        //logger.Debug($"SetupWebApi():endPointUri.PathAndQuery={endPointUri.PathAndQuery}");

        // Subscribe the endpoint to the SNS topic
        using (AmazonSimpleNotificationServiceClient amazonSimpleNotificationServiceClient =
            new AmazonSimpleNotificationServiceClient(awsAccessKeyId, awsSecretAccessKey, Amazon.RegionEndpoint.USEast1))
        {
            SubscribeResponse subscribeResponse = await amazonSimpleNotificationServiceClient.SubscribeAsync(new SubscribeRequest
            {
                TopicArn = awsTopicArn,
                Protocol = endPointUri.Scheme,
                Endpoint = awsEndPoint
            });
        }

        AmazonS3Client amazonS3Client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
        webApi.OnPost(endPointUri.PathAndQuery, async (req, res) =>
        {
            logger.Debug($"{endPointUri.PathAndQuery}");
            Dict evt = await req.ParseAsJsonAsync<Dict>();
            logger.Debug($"{endPointUri.PathAndQuery},evt=" + JsonUtil.Serialize(evt));
            string type = evt.GetAs("Type", (string)null);
            if (type == "SubscriptionConfirmation")
            {
                // Confirm the subscription by fetching the SubscribeURL
                string subscribeUrl = evt.GetAs("SubscribeURL", (string)null);
                using (WebClient webClient = new WebClient())
                {
                    string result = await webClient.DownloadStringTaskAsync(new Uri(subscribeUrl));
                    logger.Debug($"{endPointUri.PathAndQuery},result=" + result);
                }
            }
            else if (type == "Notification")
            {
                //string messageId = evt.GetAs("MessageId", (string)null);
                string messageJson = evt.GetAs("Message", (string)null);
                logger.Debug($"{endPointUri.PathAndQuery},messageJson={messageJson}");
                Dict message = JsonUtil.Deserialize<Dict>(messageJson);
                Dict mail = message.GetAs("mail", (Dict)null);
                Dict[] headers = mail.GetAs("headers", (Dict[])null);
                Dict inReplyToHeader = Array.Find(headers, x => x.GetAs("name", "") == "In-Reply-To");
                string inReplyTo = inReplyToHeader.GetAs("value", "");
                logger.Debug($"{endPointUri.PathAndQuery},inReplyTo={inReplyTo}");
                Match match = IN_REPLY_TO_REGEX.Match(inReplyTo);
                if (match.Success)
                {
                    string sentMessageId = match.Groups[1].Value;
                    string bucketKey = mail.GetAs("messageId", (string)null);
                    logger.Debug($"{endPointUri.PathAndQuery},sentMessageId={sentMessageId},bucketKey={bucketKey}");
                    if (!string.IsNullOrEmpty(bucketKey))
                    {
                        // The raw reply email was stored in S3 under the inbound messageId
                        GetObjectResponse getObjectResponse = await amazonS3Client.GetObjectAsync(new GetObjectRequest
                        {
                            BucketName = awsBucketName,
                            Key = bucketKey
                        });
                        logger.Debug($"{endPointUri.PathAndQuery},getObjectResponse={getObjectResponse}");
                        MimeMessage mimeMessage = await MimeMessage.LoadAsync(getObjectResponse.ResponseStream);
                        logger.Debug($"{endPointUri.PathAndQuery},mimeMessage={mimeMessage}");
                        await handler(sentMessageId, mimeMessage.TextBody, new string[] { });
                    }
                }
            }
        });
    }
}
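The handler above acts on any POST that reaches the endpoint. The SDK ships a helper for authenticating SNS deliveries; a minimal sketch using Amazon.SimpleNotificationService.Util.Message to verify the message signature before trusting the payload (the method name and rawBody parameter are assumptions, the rest would be wired into the OnPost handler):

// Sketch: validate an incoming SNS POST body before acting on it.
// "rawBody" is assumed to be the unparsed JSON request body.
static bool IsAuthenticSnsMessage(string rawBody)
{
    Amazon.SimpleNotificationService.Util.Message snsMessage =
        Amazon.SimpleNotificationService.Util.Message.ParseMessage(rawBody);
    // Verifies the signing certificate and the message signature.
    return snsMessage.IsMessageSignatureValid();
}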
public static async Task<IActionResult> ListingObjectsAsync(HttpRequest req, TraceWriter log)
{
    // Reference: https://docs.aws.amazon.com/AmazonS3/latest/dev/ListingObjectKeysUsingNetSDK.html
    var bucketName = Environment.GetEnvironmentVariable("S3BucketName", EnvironmentVariableTarget.Process);
    var bucketRegion = RegionEndpoint.GetBySystemName(Environment.GetEnvironmentVariable("S3BucketRegion", EnvironmentVariableTarget.Process));

    if (awsSecretKey == null)
    {
        log.Info($"Fetching AWS secret key for the first time from KeyVault...");
        var azureServiceTokenProvider = new AzureServiceTokenProvider();
        var keyVaultClient = new KeyVaultClient(new KeyVaultClient.AuthenticationCallback(azureServiceTokenProvider.KeyVaultTokenCallback));
        var secretName = Environment.GetEnvironmentVariable("AmazonS3SecretAccessKeySecretName", EnvironmentVariableTarget.Process);
        var azureKeyVaultUrl = Environment.GetEnvironmentVariable("AzureKeyVaultUrl", EnvironmentVariableTarget.Process);
        var secret = await keyVaultClient.GetSecretAsync($"{azureKeyVaultUrl}secrets/{secretName}").ConfigureAwait(false);
        awsSecretKey = secret.Value;
        log.Info("[Setting]: Successfully fetched AWS secret key from KeyVault.");
    }

    var credentials = new Amazon.Runtime.BasicAWSCredentials(Environment.GetEnvironmentVariable("AwsAccessKey", EnvironmentVariableTarget.Process), awsSecretKey);

    string s3BucketLastProcessedDateTimeUtcAsString = req.Query["s3BucketLastProcessedDateTimeUtc"]; // GET
    string requestBody = new StreamReader(req.Body).ReadToEnd(); // POST
    dynamic data = JsonConvert.DeserializeObject(requestBody);
    s3BucketLastProcessedDateTimeUtcAsString = s3BucketLastProcessedDateTimeUtcAsString ?? data?.s3BucketLastProcessedDateTimeUtc;
    if (string.IsNullOrWhiteSpace(s3BucketLastProcessedDateTimeUtcAsString))
    {
        string errorMessage = "A 's3BucketLastProcessedDateTimeUtc' querystring parameter or a request body containing a JSON object with a 's3BucketLastProcessedDateTimeUtc' property was expected but not found.";
        log.Info(errorMessage);
        return new BadRequestObjectResult(errorMessage);
    }
    var s3BucketLastProcessedDateTimeUtc = DateTime.Parse(s3BucketLastProcessedDateTimeUtcAsString);

    log.Info($"Bucket Name: {bucketName}.");
    log.Info($"Bucket Region: {bucketRegion}.");
    log.Info($"S3 Bucket Last Processed DateTimeUtc: {s3BucketLastProcessedDateTimeUtcAsString}.");

    List<S3Object> filteredObjects = new List<S3Object>();
    int totalUnfilteredCount = 0;
    int currentUnfilteredCount = 0;
    DateTime newLastProcessedDateTimeUtc = DateTime.UtcNow;
    IAmazonS3 client = new AmazonS3Client(credentials, bucketRegion);

    try
    {
        ListObjectsV2Request request = new ListObjectsV2Request { BucketName = bucketName };
        ListObjectsV2Response response;
        do
        {
            response = await client.ListObjectsV2Async(request);
            currentUnfilteredCount = response.S3Objects.Count;
            totalUnfilteredCount += currentUnfilteredCount;
            log.Info($"Results Count (pre-filtering): {currentUnfilteredCount}.");

            // Keep objects updated after the last processed date that are not folder
            // marker records (keys ending in _$folder$ with zero size).
            var currentFilteredObjects = response.S3Objects.FindAll((s3Object) =>
            {
                return DateTime.Compare(s3Object.LastModified.ToUniversalTime(), s3BucketLastProcessedDateTimeUtc) > 0
                    && !(s3Object.Key.EndsWith("_$folder$", StringComparison.InvariantCulture) && s3Object.Size == 0);
            });
            log.Info($"Results Count (post-filtering): {currentFilteredObjects.Count}.");
            filteredObjects.AddRange(currentFilteredObjects);

            log.Info($"Next Continuation Token: {response.NextContinuationToken}.");
            request.ContinuationToken = response.NextContinuationToken;
        } while (response.IsTruncated);

        log.Info($"Results Count (total-unfiltered): {totalUnfilteredCount}.");
        log.Info($"Results Count (total-filtered): {filteredObjects.Count}.");

        dynamic payload = new System.Dynamic.ExpandoObject();
        payload.s3Objects = filteredObjects;
        payload.newLastProcessedDateTimeUtc = newLastProcessedDateTimeUtc.ToString();
        return new OkObjectResult(JsonConvert.SerializeObject(payload, Formatting.Indented));
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        log.Info($"AmazonS3Exception [ListingObjectsAsync]: {amazonS3Exception.ToString()}.");
        return new BadRequestObjectResult("Operation failed (AmazonS3Exception). Check function's log for details.");
    }
    catch (Exception exception)
    {
        log.Info($"Exception [ListingObjectsAsync]: {exception.ToString()}.");
        return new BadRequestObjectResult("Operation failed. Check function's log for details.");
    }
}
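Newer releases of the AWS SDK for .NET (v3.5+) expose paginators that hide the continuation-token loop shown above. A hedged sketch of the same listing, reusing the client and bucketName from the method; the paginator API is real, the surrounding context is assumed:

// Sketch: same listing via the SDK paginator, no manual token handling.
var paginator = client.Paginators.ListObjectsV2(new ListObjectsV2Request { BucketName = bucketName });
await foreach (var s3Object in paginator.S3Objects)
{
    Console.WriteLine($"{s3Object.Key} ({s3Object.Size} bytes)");
}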
private void btnScript_Click(object sender, EventArgs e)
{
    set2Config();
    u_config.printConfig(@"C:\Wavelets decomposition\config.txt", null);

    // Create a client with generous timeouts for long-running transfers
    AmazonS3Config confisS3 = new AmazonS3Config { ProxyHost = null };
    TimeSpan timeOUT = new TimeSpan(3, 0, 0);
    confisS3.ReadWriteTimeout = timeOUT;
    confisS3.Timeout = timeOUT;
    AmazonS3Client client = new AmazonS3Client(confisS3);

    UseS3 = UseS3CB.Checked;
    rumPrallel = rumPrallelCB.Checked;
    runBoosting = runBoostingCB.Checked;
    runProoning = runProoningCB.Checked;
    runBoostingProoning = runBoostingProoningCB.Checked;
    runRFProoning = runRFProoningCB.Checked;
    runRf = runRfCB.Checked;
    runBoostingLearningRate = runBoostingLearningRateCB.Checked;
    bucketName = bucketTB.Text;

    string results_path = @ResultsTB.Text;
    string db_path = @DBTB.Text + "\\";

    //get dir
    MainFolderName = results_path;
    if (!UseS3)
    {
        if (!Directory.Exists(MainFolderName))
        {
            Directory.CreateDirectory(MainFolderName);
        }
    }
    if (UseS3)
    {
        S3DirectoryInfo s3results_path = new S3DirectoryInfo(client, bucketName, results_path);
        if (!s3results_path.Exists)
        {
            s3results_path.Create();
        }
    }

    //READ DATA
    DB db = new DB();
    db.training_dt = db.getDataTable(db_path + "trainingData.txt");
    db.testing_dt = db.getDataTable(db_path + "testingData.txt");
    db.validation_dt = db.getDataTable(db_path + "ValidData.txt");
    db.training_label = db.getDataTable(db_path + "trainingLabel.txt");
    db.testing_label = db.getDataTable(db_path + "testingLabel.txt");
    db.validation_label = db.getDataTable(db_path + "ValidLabel.txt");
    upper_label = db.training_label.Max();
    lower_label = db.training_label.Min();

    // Trim all sets to the configured training percentage
    double trainingPercent = double.Parse(trainingPercentTB.Text);
    long rowToRemoveFrom = Convert.ToInt64(db.training_dt.Count() * trainingPercent);
    db.training_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.training_label = db.training_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_dt = db.testing_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_label = db.testing_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_dt = db.validation_dt.Where((el, i) => i < rowToRemoveFrom).ToArray(); // keep validation rows aligned with validation labels
    db.validation_label = db.validation_label.Where((el, i) => i < rowToRemoveFrom).ToArray();

    //REDUCE DIM, GLOBAL PCA
    if (usePCA.Checked)
    {
        DimReduction dimreduction = new DimReduction(db.training_dt);
        db.PCAtraining_dt = dimreduction.getGlobalPca(db.training_dt);
        db.PCAtesting_dt = dimreduction.getGlobalPca(db.testing_dt);
        db.PCAvalidation_dt = dimreduction.getGlobalPca(db.validation_dt);
    }
    else
    {
        //de-activate pca for dbg
        db.PCAtraining_dt = db.training_dt;
        db.PCAtesting_dt = db.testing_dt;
        db.PCAvalidation_dt = db.validation_dt;
    }

    db.PCAtraining_GridIndex_dt = new long[db.PCAtraining_dt.Count()][];
    for (int i = 0; i < db.PCAtraining_dt.Count(); i++)
    {
        db.PCAtraining_GridIndex_dt[i] = new long[db.PCAtraining_dt[i].Count()];
    }

    //BOUNDING BOX AND MAIN GRID
    boundingBox = db.getboundingBox(db.PCAtraining_dt);
    MainGrid = db.getMainGrid(db.PCAtraining_dt, boundingBox, ref db.PCAtraining_GridIndex_dt);

    //READ CONFIG
    methodConfig mc = new methodConfig(true);
    int Nloops = int.Parse(NloopsTB.Text) - 1;
    int Kfolds = 0;
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        Nloops = Kfolds - 1;
    }
    for (int k = 0; k < Nloops; k++)
    {
        mc.boostlamda_0.Add(3.8); // create variant in number of pixels
    }
    mc.generateRecordConfigArr();

    for (int k = 0; k < mc.recArr.Count(); k++)
    {
        mc.recArr[k].dim = NfeaturesTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesTB.Text, k));
        mc.recArr[k].approxThresh = double.Parse(evaluateString(approxThreshTB.Text, k));
        mc.recArr[k].partitionErrType = int.Parse(evaluateString(partitionTypeTB.Text, k));
        mc.recArr[k].minWaveSize = int.Parse(evaluateString(minNodeSizeTB.Text, k));
        mc.recArr[k].hopping_size = int.Parse(evaluateString(waveletsSkipEstimationTB.Text, k));
        mc.recArr[k].test_error_size = double.Parse(evaluateString(waveletsPercentEstimationTB.Text, k)); // percent of waves to check
        mc.recArr[k].NskipsinKfunc = double.Parse(evaluateString(boostingKfuncPercentTB.Text, k));
        mc.recArr[k].rfBaggingPercent = double.Parse(evaluateString(bagginPercentTB.Text, k));
        mc.recArr[k].rfNum = int.Parse(evaluateString(NrfTB.Text, k));
        mc.recArr[k].boostNum = int.Parse(evaluateString(NboostTB.Text, k));
        mc.recArr[k].boostProoning_0 = int.Parse(evaluateString(NfirstPruninginBoostingTB.Text, k));
        mc.recArr[k].boostlamda_0 = double.Parse(evaluateString(boostingLamda0TB.Text, k));
        mc.recArr[k].NwaveletsBoosting = int.Parse(evaluateString(NfirstwaveletsBoostingTB.Text, k));
        mc.recArr[k].boostNumLearningRate = int.Parse(evaluateString(NboostingLearningRateTB.Text, k));
        mc.recArr[k].percent_training_db = trainingPercent;
        mc.recArr[k].BoundLevel = int.Parse(evaluateString(boundLevelTB.Text, k));
        mc.recArr[k].NDimsinRF = NfeaturesrfTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesrfTB.Text, k));
        mc.recArr[k].split_type = int.Parse(evaluateString(splitTypeTB.Text, k));
        mc.recArr[k].NormLPType = int.Parse(evaluateString(errTypeEstimationTB.Text, k));
        mc.recArr[k].RFpruningTestRange[1] = int.Parse(evaluateString(RFpruningEstimationRange1TB.Text, k));
        mc.recArr[k].boundDepthTree = int.Parse(evaluateString(boundDepthTB.Text, k));
        mc.recArr[k].CrossValidFold = k;
        mc.recArr[k].labelDim = db.training_label[0].Count(); // save label dimension in config
    }

    //create dirs
    foreach (recordConfig t in mc.recArr)
    {
        if (!UseS3 && !Directory.Exists(MainFolderName + "\\" + t.getShortName()))
        {
            Directory.CreateDirectory(MainFolderName + "\\" + t.getShortName());
            StreamWriter sw = new StreamWriter(MainFolderName + "\\" + t.getShortName() + "\\record_properties.txt", false);
            sw.WriteLine(t.getFullName());
            sw.Close();
            u_config.printConfig(MainFolderName + "\\config.txt", null);
        }
        if (!UseS3)
        {
            continue;
        }
        S3DirectoryInfo s3results_path_with_folders = new S3DirectoryInfo(client, bucketName, results_path + "\\" + t.getShortName());
        if (!s3results_path_with_folders.Exists)
        {
            s3results_path_with_folders.Create();
            S3FileInfo outFile = s3results_path_with_folders.GetFile("record_properties.txt");
            StreamWriter sw = new StreamWriter(outFile.OpenWrite());
            sw.WriteLine(t.getFullName());
            sw.Close();
            S3FileInfo configFile = s3results_path_with_folders.GetFile("config.txt");
            u_config.printConfig("", configFile);
        }
    }

    //SET ID ARRAY LIST
    List<int> trainingID = Enumerable.Range(0, db.PCAtraining_dt.Count()).ToList();
    List<int> testingID = Enumerable.Range(0, db.PCAtesting_dt.Count()).ToList();

    //cross validation
    List<List<int>> trainingFoldId = new List<List<int>>();
    List<List<int>> testingFoldId = new List<List<int>>();
    Random ran = new Random(2);
    List<int> training_rand = trainingID.OrderBy(x => ran.Next()).ToList().GetRange(0, trainingID.Count); //THE LARGEST GROUP IS TRAINING
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        createCrossValid(Kfolds, training_rand, trainingFoldId, testingFoldId);
    }

    //bounding intervals
    int[][] BB = new int[2][];
    BB[0] = new int[boundingBox[0].Count()];
    BB[1] = new int[boundingBox[0].Count()];
    for (int i = 0; i < boundingBox[0].Count(); i++)
    {
        BB[1][i] = MainGrid[i].Count() - 1; //set last index in each dim
    }

    for (int i = 0; i < mc.recArr.Count; i++)
    {
        Analizer analizer = new Analizer(MainFolderName + "\\" + mc.recArr[i].getShortName(), MainGrid, db, mc.recArr[i]);
        if (!croosValidCB.Checked)
        {
            analizer.analize(trainingID, testingID, BB);
        }
        else
        {
            analizer.analize(trainingFoldId[i], testingFoldId[i], BB); //cross validation
        }
    }

    btnScript.BackColor = Color.Green;
}
public AwsS3DirectoryBrowser(AmazonS3Client client, string bucketName)
{
    _client = client;
    _bucketName = bucketName;
}
public static async Task LoadAsync()
{
    var masterDataPath = new MasterDataPath();
    if (false == await masterDataPath.FetchAsync())
    {
        Logger.Logging(new LogObj().AddChild(new LogModels.ErrorReport
        {
            Msg = "Not found MasterDataPath@Kvs!",
        }));
        return;
    }

    if ((LatestMasterData != null && LatestMasterData.VersionStr == masterDataPath.Model.version)
        && masterDataPath.Model.updateDate == LatestMasterDataUpdateDate)
    {
        // Already loaded
        return;
    }

    var path = "";
    var rawJson = "";
    if (!string.IsNullOrEmpty(masterDataPath.Model.s3KeyPlain))
    {
        try
        {
            path = $"{DeliveryDataInfo.S3BucketName}/{masterDataPath.Model.s3KeyPlain}";
            Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataStartLoading
            {
                Path = path,
                UpdateDate = masterDataPath.Model.updateDate,
                Date = DateTime.UtcNow,
            }));

            var s3Client = new AmazonS3Client(Amazon.RegionEndpoint.GetBySystemName(DeliveryDataInfo.S3BucketRegion));
            var getReq = new GetObjectRequest
            {
                BucketName = DeliveryDataInfo.S3BucketName,
                Key = masterDataPath.Model.s3KeyPlain,
            };
            using (var getRes = await s3Client.GetObjectAsync(getReq))
            using (var reader = new System.IO.StreamReader(getRes.ResponseStream))
            {
                rawJson = await reader.ReadToEndAsync();
            }
        }
        catch (Exception)
        {
            // S3 fetch failed; give up and keep the previously loaded master data
            return;
        }
    }
    else
    {
        path = $"{masterDataPath.Model.pathPlain}";
        Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataStartLoading
        {
            Path = path,
            UpdateDate = masterDataPath.Model.updateDate,
            Date = DateTime.UtcNow,
        }));
        var requester = new JsonSrcRequester();
        var response = await requester.GetAsync(masterDataPath.Model.pathPlain);
        if (response.StatusCode != System.Net.HttpStatusCode.OK)
        {
            Logger.Logging(new LogObj().AddChild(new LogModels.ErrorReport
            {
                Msg = $"Not found JsonFile at \"{masterDataPath.Model.pathPlain}\".",
            }));
            return;
        }
        rawJson = response.Payload;
    }

    var masterData = await ParseAsync(rawJson, masterDataPath.Model.pathEncrypt, masterDataPath.Model.pathPlain);
    LatestMasterData = masterData;
    LatestMasterDataUpdateDate = masterDataPath.Model.updateDate;
    Logger.Logging(new LogObj().AddChild(new LogModels.MasterDataEndLoading
    {
        Path = path,
        UpdateDate = masterDataPath.Model.updateDate,
        Date = DateTime.UtcNow,
    }));
}
public S3UnitOfWorkFileWriter(AmazonS3Client client, string bucket)
{
    _Client = client;
    _Bucket = bucket;
}
public S3StorageService()
{
    s3Client = new AmazonS3Client(accessKey, secretKey, RegionEndpoint.USEast2);
}
public ActionResult Settings(user model)
{
    user user = db.users.FirstOrDefault(x => x.UserName == User.Identity.Name);
    if (user == null)
    {
        return RedirectToAction("Index", "Home");
    }

    if (Request.Files != null && Request.Files.Count > 0)
    {
        foreach (string keyname in Request.Files)
        {
            HttpPostedFileBase file = Request.Files[keyname];
            if (file != null && file.ContentLength > 0 && !string.IsNullOrEmpty(file.FileName))
            {
                //file upload
                string ext = Path.GetExtension(file.FileName).ToLower();
                if (ext != ".png" && ext != ".jpg" && ext != ".jpeg" && ext != ".swf" && ext != ".fla")
                {
                    ModelState.AddModelError(keyname, "Invalid file type");
                }
                else
                {
                    try
                    {
                        using (Image tmp = Image.FromStream(file.InputStream))
                        {
                            //resize+crop
                            int width = int.Parse(ConfigurationManager.AppSettings["Image_User_Width"]);
                            int height = int.Parse(ConfigurationManager.AppSettings["Image_User_Height"]);
                            string name = getGuid() + ".jpg";
                            string filepath = string.Format("users/{0}", name);
                            string address = ConfigurationManager.AppSettings["AWSS3BucketUrl"] + filepath;

                            //send
                            using (Image resized = tmp.GetResizedImage(width, height, true))
                            {
                                var request = new PutObjectRequest()
                                    .WithBucketName(ConfigurationManager.AppSettings["AWSS3Bucket"])
                                    .WithKey(filepath);
                                using (MemoryStream buffer = new MemoryStream())
                                {
                                    resized.Save(buffer, ImageHelper.GetJpgEncoder(), ImageHelper.GetJpgEncoderParameters(80));
                                    request.InputStream = buffer;
                                    AmazonS3Client s3Client = new AmazonS3Client();
                                    s3Client.PutObject(request);
                                }
                            }

                            ModelState.Remove(keyname);
                            ModelState.Add(keyname, new ModelState());
                            ModelState.SetModelValue(keyname, new ValueProviderResult(address, address, null));
                            user.Image = address;
                        }
                    }
                    catch (Exception ex)
                    {
                        ModelState.AddModelError(keyname, "Upload error: " + ex.Message);
                    }
                }

                // Note: returns after the first posted file, so the profile fields
                // below are never saved on the same request as an image upload.
                ViewBag.CountryID = new SelectList(db.countries, "CountryID", "Name", user.CountryID);
                return View(user);
            }
        }
    }

    if (ModelState.IsValid)
    {
        user.Name = model.Name;
        user.Lastname = model.Lastname;
        user.Bio = model.Bio;
        user.Image = model.Image;
        user.CountryID = model.CountryID;
        user.Contact_Blog = model.Contact_Blog;
        user.Contact_Email = model.Contact_Email;
        user.Contact_Facebook = model.Contact_Facebook;
        user.Contact_Linkedin = model.Contact_Linkedin;
        user.Contact_Skype = model.Contact_Skype;
        user.Contact_Tel = model.Contact_Tel;
        user.Contact_Twitter = model.Contact_Twitter;
        user.Contact_Web = model.Contact_Web;
        db.ObjectStateManager.ChangeObjectState(user, EntityState.Modified);
        db.SaveChanges();
        //clear
        OutputCacheAttribute.ChildActionCache = new MemoryCache("newcache");
    }

    ViewBag.CountryID = new SelectList(db.countries, "CountryID", "Name", user.CountryID);
    return View(user);
}
public async Task ProcessBuildAsync(string solutionName, string sourceKey, string sourceBucket, string destinationBucket)
{
    var tempPath = Path.Combine(Path.GetTempPath(), solutionName.Replace(".", ""));
    if (!Directory.Exists(tempPath))
    {
        Directory.CreateDirectory(tempPath);
    }
    DeleteDirectoryContents(tempPath);

    using var zipUtil = new ZipUtilities();
    var archive = await zipUtil.GetZipArchiveAsync(sourceBucket, sourceKey);
    foreach (var entry in archive.Entries.Distinct())
    {
        if (entry.Name.Length <= 0)
        {
            continue; // skip directory entries
        }
        if (entry.FullName.ToLower().Contains("/runtimes/"))
        {
            // Preserve the runtimes/<rid>/... folder structure
            var runtimesPath = entry.FullName.Split("/runtimes/")[1];
            var fullPath = Path.Combine(tempPath, "runtimes", runtimesPath);
            var directoryName = Path.GetDirectoryName(fullPath);
            if (!Directory.Exists(directoryName))
            {
                Directory.CreateDirectory(directoryName);
            }
            entry.ExtractToFile(fullPath);
        }
        else
        {
            entry.ExtractToFile(Path.Combine(tempPath, entry.Name));
        }
    }

    var version = await zipUtil.GetVersionFromBuildInfoAsync(sourceBucket, sourceKey);
    foreach (var template in Directory.EnumerateFiles(tempPath, "*.template"))
    {
        InjectVersionIntoTemplate(template, version);
    }

    var zipFileName = $"{solutionName}.{version}.zip";
    var zipFullname = Path.Combine(tempPath, zipFileName);
    using Ionic.Zip.ZipFile zip = new Ionic.Zip.ZipFile();
    zip.AddDirectory(tempPath);
    zip.Save(zipFullname);

    using var s3Client = new AmazonS3Client(RegionEndpoint.USEast1);
    await s3Client.PutObjectAsync(new PutObjectRequest
    {
        BucketName = destinationBucket,
        Key = $"{solutionName}/{version}/{zipFileName}",
        FilePath = zipFullname
    });
}
public void AclSamples()
{
    {
        #region PutACL Sample 1

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Set Canned ACL (PublicRead) for an existing item
        client.PutACL(new PutACLRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            CannedACL = S3CannedACL.PublicRead
        });

        // Set Canned ACL (Private) for an existing item
        // (This reverts the ACL back to the default for the object)
        client.PutACL(new PutACLRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            CannedACL = S3CannedACL.Private
        });

        #endregion
    }
    {
        #region GetACL\PutACL Samples

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Retrieve ACL for object
        S3AccessControlList acl = client.GetACL(new GetACLRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        }).AccessControlList;

        // Retrieve owner
        Owner owner = acl.Owner;

        // Describe grant
        S3Grant grant = new S3Grant
        {
            Grantee = new S3Grantee { EmailAddress = "*****@*****.**" },
            Permission = S3Permission.WRITE_ACP
        };

        // Create new ACL
        S3AccessControlList newAcl = new S3AccessControlList
        {
            Grants = new List<S3Grant> { grant },
            Owner = owner
        };

        // Set new ACL
        PutACLResponse response = client.PutACL(new PutACLRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            AccessControlList = newAcl
        });

        #endregion
    }
}
/// <summary>
/// Default constructor. This constructor is used by Lambda to construct the instance. When invoked in a Lambda environment
/// the AWS credentials will come from the IAM role associated with the function and the AWS region will be set to the
/// region the Lambda function is executed in.
/// </summary>
public Function()
{
    S3Client = new AmazonS3Client();
    SnsClient = new AmazonSimpleNotificationServiceClient();
    IamClient = new AmazonIdentityManagementServiceClient();
}
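A common companion in Lambda function templates is a second constructor that accepts the clients, so unit tests can inject mocks. A minimal sketch, assuming the three fields are typed as the SDK interfaces rather than the concrete clients:

/// <summary>
/// Sketch: overload for unit tests; field types are assumed to be IAmazonS3,
/// IAmazonSimpleNotificationService and IAmazonIdentityManagementService.
/// </summary>
public Function(IAmazonS3 s3Client, IAmazonSimpleNotificationService snsClient, IAmazonIdentityManagementService iamClient)
{
    S3Client = s3Client;
    SnsClient = snsClient;
    IamClient = iamClient;
}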
public ActionResult Create(Event imageDB)
{
    // Build a unique file name for the event image and record its future URL
    string filename = Path.GetFileNameWithoutExtension(imageDB.ImageFile.FileName);
    string extension = Path.GetExtension(imageDB.ImageFile.FileName);
    filename = filename + DateTime.Now.ToString("yymmssfff") + extension; // note: "mm" is minutes, not months
    string keyName = filename;
    string aws_s2 = "https://tech-events-uk.s3.eu-west-2.amazonaws.com/";
    imageDB.ImagePath = aws_s2 + filename;

    // Upload image to AWS S3
    HttpPostedFileBase file = Request.Files[0];
    var s3Client = new AmazonS3Client(accesskey, secretkey, bucketRegion);
    var fileTransferUtility = new TransferUtility(s3Client);
    var fileTransferUtilityRequest = new TransferUtilityUploadRequest
    {
        BucketName = bucketName,
        InputStream = file.InputStream,
        StorageClass = S3StorageClass.StandardInfrequentAccess,
        PartSize = 6291456, // 6 MB
        Key = keyName,
        CannedACL = S3CannedACL.PublicRead
    };
    fileTransferUtilityRequest.Metadata.Add("param1", "Value1");
    fileTransferUtilityRequest.Metadata.Add("param2", "Value2");
    fileTransferUtility.Upload(fileTransferUtilityRequest);
    fileTransferUtility.Dispose();

    // Convert the postcode to latitude/longitude via the Google Geocoding API
    string requestUri = string.Format("https://maps.googleapis.com/maps/api/geocode/xml?key={1}&address={0}&sensor=false",
        Uri.EscapeDataString(imageDB.Postcode), "AIzaSyBj8k95-RJyz0HNan_RcgS_-suLQVb7NzA");
    WebRequest request = WebRequest.Create(requestUri);
    WebResponse response = request.GetResponse();
    XDocument xdoc = XDocument.Load(response.GetResponseStream());
    XElement result = xdoc.Element("GeocodeResponse").Element("result");
    XElement locationElement = result.Element("geometry").Element("location");
    XElement lat = locationElement.Element("lat");
    XElement lng = locationElement.Element("lng");
    imageDB.Latitude = Convert.ToDouble(lat.Value);
    imageDB.Longitude = Convert.ToDouble(lng.Value);

    db.Event.Add(imageDB);
    db.SaveChanges();
    return RedirectToAction("Index");
}
public S3Uploader()
{
    client = new AmazonS3Client("key", "secret", Amazon.RegionEndpoint.USEast1);
}
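Hard-coding key material like this is worth avoiding even with placeholders. A hedged alternative sketch that relies on the SDK's default credential chain (environment variables, the shared credentials file, or an instance/role profile) instead:

// Sketch: let the SDK resolve credentials from its default chain.
public S3Uploader()
{
    client = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
}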
public List<int> UploadMealPhotos(HttpFileCollection hfc, int mealId, int userId)
{
    List<int> photoIds = null;
    AmazonS3Client client = new AmazonS3Client(_siteConfigService.AwsAccessKey, _siteConfigService.AwsSecretKey, Amazon.RegionEndpoint.USWest2);
    TransferUtility fileTransferUtility = new TransferUtility(client);
    List<string> photoFiles = null;

    // Upload each posted file under a GUID-prefixed key and remember the keys
    for (int i = 0; i < hfc.Count; i++)
    {
        HttpPostedFile hpf = hfc[i];
        TransferUtilityUploadRequest fileTransferUtilityRequest = new TransferUtilityUploadRequest
        {
            BucketName = ExistingBucketName,
            InputStream = hpf.InputStream,
            Key = KeyName + "/" + Guid.NewGuid().ToString() + Path.GetFileName(hpf.FileName),
        };
        fileTransferUtility.Upload(fileTransferUtilityRequest);
        if (photoFiles == null)
        {
            photoFiles = new List<string>();
        }
        photoFiles.Add(fileTransferUtilityRequest.Key);
    }

    // Persist the uploaded keys through a stored procedure and collect the new photo ids
    Action<SqlParameterCollection> inputParamDelegate = delegate(SqlParameterCollection paramCollection)
    {
        paramCollection.AddWithValue("@MealId", mealId);
        paramCollection.AddWithValue("@UserId", userId);
        SqlParameter filesParam = new SqlParameter("@Files", SqlDbType.Structured);
        if (photoFiles != null && photoFiles.Any())
        {
            NVarcharTable fileNames = new NVarcharTable(photoFiles);
            filesParam.Value = fileNames;
        }
        paramCollection.Add(filesParam);
    };
    Action<IDataReader, short> singleRecMapper = delegate(IDataReader reader, short set)
    {
        if (photoIds == null)
        {
            photoIds = new List<int>();
        }
        photoIds.Add(reader.GetSafeInt32(0));
    };
    _dataProvider.ExecuteCmd("dbo.Files_MealsPhoto_Insert", inputParamDelegate, singleRecMapper);
    return photoIds;
}
public async Task DeployStepFunctionWithTemplateSubstitution()
{
    var cfClient = new AmazonCloudFormationClient(RegionEndpoint.USEast2);
    var s3Client = new AmazonS3Client(RegionEndpoint.USEast2);
    var bucketName = "deploy-step-functions-" + DateTime.Now.Ticks;
    await s3Client.PutBucketAsync(bucketName);
    try
    {
        var logger = new TestToolLogger();
        var assembly = this.GetType().GetTypeInfo().Assembly;
        var fullPath = Path.GetFullPath(Path.GetDirectoryName(assembly.Location) + "../../../../../../testapps/TemplateSubstitutionTestProjects/StateMachineDefinitionStringTest");
        var command = new DeployServerlessCommand(logger, fullPath, new string[0]);
        command.DisableInteractive = true;
        command.Configuration = "Release";
        command.TargetFramework = "netcoreapp1.0";
        command.StackName = "DeployStepFunctionWithTemplateSubstitution-" + DateTime.Now.Ticks;
        command.S3Bucket = bucketName;
        command.WaitForStackToComplete = true;
        command.TemplateParameters = new Dictionary<string, string> { { "NonExisting", "Parameter" }, { "StubParameter", "SecretFoo" } };

        var created = await command.ExecuteAsync();
        try
        {
            Assert.True(created);
            var describeResponse = await cfClient.DescribeStacksAsync(new DescribeStacksRequest { StackName = command.StackName });
            Assert.Equal(StackStatus.CREATE_COMPLETE, describeResponse.Stacks[0].StackStatus);

            // Secret parameter values must be masked in the log output
            Assert.DoesNotContain("SecretFoo", logger.Buffer.ToString());
            Assert.Contains("****", logger.Buffer.ToString());
        }
        finally
        {
            if (created)
            {
                try
                {
                    var deleteCommand = new DeleteServerlessCommand(new ConsoleToolLogger(), fullPath, new string[0]);
                    deleteCommand.StackName = command.StackName;
                    await deleteCommand.ExecuteAsync();
                }
                catch
                {
                    // Bury exception because we don't want to lose any exceptions during the deploy stage.
                }
            }
        }
    }
    finally
    {
        await AmazonS3Util.DeleteS3BucketWithObjectsAsync(s3Client, bucketName);
    }
}
//--- Class Methods ---
public static async Task<(ITypeSystem TypeSystem, bool Cached)> LoadCloudFormationSpecificationAsync(
    string cacheDirectory,
    string region,
    bool forceRefresh,
    Action<string>? log
)
{
    if (region is null)
    {
        throw new ArgumentNullException(nameof(region));
    }
    var cached = false;

    // check if we already have a CloudFormation specification downloaded
    var cloudFormationSpecFile = Path.Combine(cacheDirectory, "AWS", region, "CloudFormationResourceSpecification.json.br");
    var exists = File.Exists(cloudFormationSpecFile);
    var modifiedSince = exists ? File.GetLastWriteTimeUtc(cloudFormationSpecFile) : DateTime.MinValue;
    var now = DateTime.UtcNow;

    // refresh if forced, if the CloudFormation specification doesn't exist, or if it's too old
    if (forceRefresh || !exists || (modifiedSince.AddDays(1) <= now))
    {
        // fetch new CloudFormation specification, but only if it has been modified
        var cloudFormationSpecificationKey = $"AWS/{region}/CloudFormationResourceSpecification.json.br";
        var s3ClientUSEast1 = new AmazonS3Client(RegionEndpoint.USEast1);
        try
        {
            var response = await s3ClientUSEast1.GetObjectAsync(new GetObjectRequest
            {
                BucketName = "lambdasharp",
                Key = cloudFormationSpecificationKey,
                RequestPayer = RequestPayer.Requester,
                ModifiedSinceDateUtc = modifiedSince
            });
            log?.Invoke("downloading new CloudFormation specification");

            // write new CloudFormation specification
            Directory.CreateDirectory(Path.GetDirectoryName(cloudFormationSpecFile));
            using (var outputStream = File.OpenWrite(cloudFormationSpecFile))
            {
                await response.ResponseStream.CopyToAsync(outputStream);
            }

            // check if we need to update the LambdaSharp developer copy
            var lambdaSharpDirectory = Environment.GetEnvironmentVariable("LAMBDASHARP");
            if (lambdaSharpDirectory != null)
            {
                log?.Invoke("updating LambdaSharp contributor CloudFormation specification");
                using var specFile = File.OpenRead(cloudFormationSpecFile);
                using var decompressionStream = new BrotliStream(specFile, CompressionMode.Decompress);
                var document = await JsonSerializer.DeserializeAsync<object>(decompressionStream);
                await File.WriteAllTextAsync(
                    Path.Combine(lambdaSharpDirectory, "src", "CloudFormationResourceSpecification.json"),
                    JsonSerializer.Serialize(document, new JsonSerializerOptions
                    {
                        WriteIndented = true,
                        IgnoreNullValues = true,
                        Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping
                    })
                );
            }
        }
        catch (AmazonS3Exception e) when (
            (e.InnerException is Amazon.Runtime.Internal.HttpErrorResponseException httpException)
            && (httpException.Response.StatusCode == HttpStatusCode.NotModified)
        )
        {
            log?.Invoke("CloudFormation specification is up-to-date");
            cached = true;

            // touch CloudFormation specification to avoid another check until it expires again in 24 hours
            File.SetLastWriteTimeUtc(cloudFormationSpecFile, now);
        }
    }
    else
    {
        cached = true;
    }

    // load CloudFormation specification
    using (var stream = File.OpenRead(cloudFormationSpecFile))
    {
        using var compression = new BrotliStream(stream, CompressionMode.Decompress);
        var specification = await CloudFormationTypeSystem.LoadFromAsync(region, compression);
        log?.Invoke($"using CloudFormation specification v{specification.Version}");
        return (TypeSystem: specification, Cached: cached);
    }
}
public virtual async Task<IStoredItemInformation> GetInformationAsync(string storagePointer, CancellationToken cancellationToken = default)
{
    try
    {
        var result = new StoredItemInformation();
        using (var s3Client = new AmazonS3Client(GetAmazonCredentials(), GetBucketInfo().GetRegionEndpoint()))
        {
            try
            {
                // Try the pointer as a file first
                var omInfo = await s3Client
                    .GetObjectMetadataAsync(GetBucketInfo().Name, storagePointer, cancellationToken)
                    .ConfigureAwait(false);
                result.StoredType = StoredItemType.File;
                result.Size = omInfo.ContentLength;
                result.LastModifiedTimestamp = omInfo.LastModified.ToUniversalTime();
                result.StoragePointer = storagePointer;
            }
            catch (AmazonS3Exception s3Exception) when (s3Exception.StatusCode == HttpStatusCode.NotFound)
            {
                // Fall back to treating the pointer as a directory prefix
                storagePointer = storagePointer.EndsWith(DirectorySeparatorCharacter.ToString())
                    ? storagePointer
                    : storagePointer + DirectorySeparatorCharacter;
                var loInfo = await s3Client
                    .ListObjectsAsync(GetBucketInfo().Name, storagePointer, cancellationToken)
                    .ConfigureAwait(false);
                if (!loInfo.S3Objects.Any())
                {
                    throw;
                }
                result.StoredType = StoredItemType.Directory;
                result.Size = null;
            }
        }

        var pathParts = storagePointer
            .Split(DirectorySeparatorCharacter)
            .Where(part => !string.IsNullOrWhiteSpace(part))
            .ToList();
        var name = pathParts.Last();
        pathParts.Remove(name);
        var dirPath = string.Join(DirectorySeparatorCharacter.ToString(), pathParts);
        if (result.StoredType == StoredItemType.Directory)
        {
            name += DirectorySeparatorCharacter;
        }
        result.DirectoryPath = dirPath;
        result.Name = name;
        result.CreatedTimestamp = null;
        return result;
    }
    catch (AmazonS3Exception s3Exception) when (s3Exception.StatusCode == HttpStatusCode.NotFound)
    {
        throw Errors.FileNotFoundException;
    }
    catch (Exception)
    {
        throw Errors.UnknownException;
    }
}
public ActionResult CommentCreate(update_comment model)
{
    update update = db.updates.Include("project").FirstOrDefault(x => x.UpdateID == model.UpdateID);
    project project = update.project;
    model.UserID = CurrentUser.UserID;
    model.Date = DateTime.Now;
    model.Picture = null;

    ModelState.Remove("UserID");
    ModelState.Add("UserID", new ModelState());
    ModelState.SetModelValue("UserID", new ValueProviderResult(CurrentUser.UserID, CurrentUser.UserID.ToString(), null));
    ModelState.Remove("Date");
    ModelState.Add("Date", new ModelState());
    ModelState.SetModelValue("Date", new ValueProviderResult(DateTime.Now, DateTime.Now.ToString(), null));

    if (Request.Files != null && Request.Files.Count > 0)
    {
        foreach (string keyname in Request.Files)
        {
            HttpPostedFileBase file = Request.Files[keyname];
            if (file != null && file.ContentLength > 0 && !string.IsNullOrEmpty(file.FileName))
            {
                //file upload
                string ext = Path.GetExtension(file.FileName).ToLower();
                if (ext != ".png" && ext != ".jpg" && ext != ".jpeg" && ext != ".swf" && ext != ".fla")
                {
                    ModelState.AddModelError(keyname, "Invalid file type");
                }
                else
                {
                    try
                    {
                        using (Image tmp = Image.FromStream(file.InputStream))
                        {
                            //resize+crop
                            int width = int.Parse(ConfigurationManager.AppSettings["Image_Comment_Width"]);
                            int height = int.Parse(ConfigurationManager.AppSettings["Image_Comment_Height"]);
                            string name = getTimestamp() + ".jpg"; // note: unused below; the key uses the raw client file name
                            string filepath = string.Format("projects/{0}/update/{1}/comments/{2}", update.ProjectID, update.UpdateID, file.FileName);
                            string address = ConfigurationManager.AppSettings["AWSS3BucketUrl"] + filepath;

                            //send
                            using (Image resized = tmp.GetResizedImage(width, height, true))
                            {
                                var request = new PutObjectRequest()
                                    .WithBucketName(ConfigurationManager.AppSettings["AWSS3Bucket"])
                                    .WithKey(filepath);
                                using (MemoryStream buffer = new MemoryStream())
                                {
                                    resized.Save(buffer, ImageHelper.GetJpgEncoder(), ImageHelper.GetJpgEncoderParameters(80));
                                    request.InputStream = buffer;
                                    AmazonS3Client s3Client = new AmazonS3Client();
                                    s3Client.PutObject(request);
                                }
                            }

                            ModelState.Remove(keyname);
                            ModelState.Add(keyname, new ModelState());
                            ModelState.SetModelValue(keyname, new ValueProviderResult(address, address, null));
                            model.Picture = address;
                        }
                    }
                    catch (Exception ex)
                    {
                        ModelState.AddModelError(keyname, "Upload error: " + ex.Message);
                    }
                }
            }
        }
    }

    if (ModelState.IsValid)
    {
        //akismet
        Validator validator = new Validator(ConfigurationManager.AppSettings["Akismet_Key"]);
        bool isspam = validator.IsSpam(new Comment()
        {
            comment_author_email = CurrentUser.Email,
            blog = ConfigurationManager.AppSettings["Akismet_Url"],
            comment_author = CurrentUser.UserName,
            comment_content = model.Text,
            user_agent = Request.UserAgent,
            user_ip = Request.UserHostAddress,
            referrer = Request.UrlReferrer != null ? Request.UrlReferrer.AbsoluteUri : "",
            comment_type = "comment"
        });
        if (isspam)
        {
            string spam = string.Format("From: {0}\r\nProject: {1}\r\nProjectID: {2}\r\n\r\nComment SPAM TEXT: {3}\r\n\r\nThis post was not saved.",
                CurrentUser.Email, project.Title, update.ProjectID, model.Text);
            //SendMailAwsAdmin("New Comment SPAM: " + project.Title, spam);
            LogStuff("SPAM", DateTime.Now, spam);
        }
        else
        {
            update.update_comment.Add(model);
            db.SaveChanges();
            string body = string.Format("From: {0}\r\nProject: {1}\r\nProjectID: {2}\r\n\r\nComment: {3}",
                CurrentUser.Email, project.Title, update.ProjectID, model.Text);
            SendMailAws(project.user.Email, "New Comment : " + project.Title, body);
            SendMailAwsAdmin("New Comment : " + project.Title, body);
        }
    }

    if (Request.UrlReferrer != null)
    {
        HttpResponse.RemoveOutputCacheItem(Request.UrlReferrer.AbsolutePath);
        return Redirect(ReferrerUrlTimestamped() + "#comments-" + model.update.UpdateID);
    }
    return Redirect("/");
}
public void BucketSamples()
{
    {
        #region ListBuckets Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Issue call
        ListBucketsResponse response = client.ListBuckets();

        // View response data
        Console.WriteLine("Buckets owner - {0}", response.Owner.DisplayName);
        foreach (S3Bucket bucket in response.Buckets)
        {
            Console.WriteLine("Bucket {0}, Created on {1}", bucket.BucketName, bucket.CreationDate);
        }

        #endregion
    }
    {
        #region BucketPolicy Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Put sample bucket policy (overwrites an existing policy)
        string newPolicy = @"{
            ""Statement"":[{
                ""Sid"":""BasicPerms"",
                ""Effect"":""Allow"",
                ""Principal"": ""*"",
                ""Action"":[""s3:PutObject"",""s3:GetObject""],
                ""Resource"":[""arn:aws:s3:::samplebucketname/*""]
            }]}";
        PutBucketPolicyRequest putRequest = new PutBucketPolicyRequest
        {
            BucketName = "SampleBucket",
            Policy = newPolicy
        };
        client.PutBucketPolicy(putRequest);

        // Retrieve current policy
        GetBucketPolicyRequest getRequest = new GetBucketPolicyRequest
        {
            BucketName = "SampleBucket"
        };
        string policy = client.GetBucketPolicy(getRequest).Policy;
        Console.WriteLine(policy);
        Debug.Assert(policy.Contains("BasicPerms"));

        // Delete current policy
        DeleteBucketPolicyRequest deleteRequest = new DeleteBucketPolicyRequest
        {
            BucketName = "SampleBucket"
        };
        client.DeleteBucketPolicy(deleteRequest);

        // Retrieve current policy and verify that it is null
        policy = client.GetBucketPolicy(getRequest).Policy;
        Debug.Assert(policy == null);

        #endregion
    }
    {
        #region GetBucketLocation Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Construct request
        GetBucketLocationRequest request = new GetBucketLocationRequest
        {
            BucketName = "SampleBucket"
        };

        // Issue call
        GetBucketLocationResponse response = client.GetBucketLocation(request);

        // View response data
        Console.WriteLine("Bucket location - {0}", response.Location);

        #endregion
    }
    {
        #region PutBucket Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Construct request
        PutBucketRequest request = new PutBucketRequest
        {
            BucketName = "SampleBucket",
            BucketRegion = S3Region.EU,          // set region to EU
            CannedACL = S3CannedACL.PublicRead   // make bucket publicly readable
        };

        // Issue call
        PutBucketResponse response = client.PutBucket(request);

        #endregion
    }
    {
        #region DeleteBucket Sample 1

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Construct request
        DeleteBucketRequest request = new DeleteBucketRequest
        {
            BucketName = "SampleBucket"
        };

        // Issue call
        DeleteBucketResponse response = client.DeleteBucket(request);

        #endregion
    }
    {
        #region DeleteBucket Sample 2

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // List and delete all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket"
        };
        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                // Delete each object
                client.DeleteObject(new DeleteObjectRequest
                {
                    BucketName = "SampleBucket",
                    Key = obj.Key
                });
            }

            // Set the marker property
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);

        // Construct DeleteBucket request
        DeleteBucketRequest request = new DeleteBucketRequest
        {
            BucketName = "SampleBucket"
        };

        // Issue call
        DeleteBucketResponse response = client.DeleteBucket(request);

        #endregion
    }
    {
        #region LifecycleConfiguration Sample

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Put sample lifecycle configuration (overwrites an existing configuration)
        LifecycleConfiguration newConfiguration = new LifecycleConfiguration
        {
            Rules = new List<LifecycleRule>
            {
                // Rule to delete keys with prefix "Test-" after 5 days
                new LifecycleRule
                {
                    Prefix = "Test-",
                    Expiration = new LifecycleRuleExpiration { Days = 5 }
                },
                // Rule to delete keys in subdirectory "Logs" after 2 days
                new LifecycleRule
                {
                    Prefix = "Logs/",
                    Expiration = new LifecycleRuleExpiration { Days = 2 },
                    Id = "log-file-removal"
                }
            }
        };
        PutLifecycleConfigurationRequest putRequest = new PutLifecycleConfigurationRequest
        {
            BucketName = "SampleBucket",
            Configuration = newConfiguration
        };
        client.PutLifecycleConfiguration(putRequest);

        // Retrieve current configuration
        GetLifecycleConfigurationRequest getRequest = new GetLifecycleConfigurationRequest
        {
            BucketName = "SampleBucket"
        };
        LifecycleConfiguration configuration = client.GetLifecycleConfiguration(getRequest).Configuration;
        Console.WriteLine("Configuration contains {0} rules", configuration.Rules.Count);
        foreach (LifecycleRule rule in configuration.Rules)
        {
            Console.WriteLine("Rule");
            Console.WriteLine(" Prefix = " + rule.Prefix);
            Console.WriteLine(" Expiration (days) = " + rule.Expiration.Days);
            Console.WriteLine(" Id = " + rule.Id);
            Console.WriteLine(" Status = " + rule.Status);
        }

        // Put a new configuration that overwrites the existing configuration
        configuration.Rules.RemoveAt(0); // remove first rule
        putRequest.Configuration = configuration; // put the modified copy
        client.PutLifecycleConfiguration(putRequest);

        // Delete current configuration
        DeleteLifecycleConfigurationRequest deleteRequest = new DeleteLifecycleConfigurationRequest
        {
            BucketName = "SampleBucket"
        };
        client.DeleteLifecycleConfiguration(deleteRequest);

        // Retrieve current configuration and verify that it is null
        configuration = client.GetLifecycleConfiguration(getRequest).Configuration;
        Debug.Assert(configuration == null);

        #endregion
    }
}
static void ApplyCloudFormationChangeSetExample()
{
    string bucket_Name = QSS3BucketName;
    string templateName = QSS3KeyPrefix + TdwUtils.cfClassPathBastionChangeSet.Replace("tdw_cf_template\\", "");
    string stack_name = QSS3KeyPrefix + TdwUtils.cfClassPathBastion.Replace("tdw_cf_template\\", "");
    stack_name = stack_name.Replace("-", "");
    stack_name = stack_name.Replace(".template", "");

    AmazonS3Client s3Client = new AmazonS3Client();
    AmazonCloudFormationClient cfClient = new AmazonCloudFormationClient();

    // Download the change-set template body from S3
    GetObjectRequest getObjectRequest = new GetObjectRequest
    {
        BucketName = bucket_Name,
        Key = templateName,
    };
    string data = null;
    using (GetObjectResponse getObjectResponse = s3Client.GetObject(getObjectRequest))
    using (var stream = getObjectResponse.ResponseStream)
    using (var reader = new StreamReader(stream))
    {
        data = reader.ReadToEnd();
    }

    List<string> CfCapabilities = new List<string>();
    CfCapabilities.Add("CAPABILITY_IAM");

    List<Amazon.CloudFormation.Model.Parameter> parameters = new List<Amazon.CloudFormation.Model.Parameter>();
    parameters.Add(new Parameter { ParameterKey = "pEnvTag", ParameterValue = "development" });

    List<string> notificationArns = new List<string>();
    notificationArns.Add("aws:sns:eu-west-1:009837347446:tdwcftdevmainbastion-LoggingTemplate-1E3KD8XDHOSTY-rSecurityAlarmTopic-1TNN0GI7819UM");

    List<string> resourceTypes = new List<string>();
    resourceTypes.Add("AWS::*");

    List<Amazon.CloudFormation.Model.Tag> tagList = new List<Amazon.CloudFormation.Model.Tag>();
    tagList.Add(new Amazon.CloudFormation.Model.Tag() { Key = "environment", Value = "development" });

    CreateChangeSetRequest cfReq = new CreateChangeSetRequest()
    {
        Capabilities = CfCapabilities,
        ChangeSetName = "tdwv010001",
        ChangeSetType = ChangeSetType.UPDATE,
        ClientToken = "fromappsettingsv010001",
        Description = "Adding kinesis template to tdw stack and parameterizing env parameter",
        //NotificationARNs = notificationArns,
        Parameters = parameters,
        //ResourceTypes = resourceTypes,
        //RoleARN
        StackName = stack_name,
        Tags = tagList,
        TemplateBody = data
        //UsePreviousTemplate = true
    };
    CreateChangeSetResponse cfResp = cfClient.CreateChangeSet(cfReq);
}
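CreateChangeSet only stages the changes; nothing is applied until the change set is executed. A hedged sketch of the follow-up call, continuing with the cfClient and stack_name from the method above (a real deployment would first wait for the change set to reach CREATE_COMPLETE, e.g. via DescribeChangeSet):

// Sketch: apply the staged change set once it has finished creating.
cfClient.ExecuteChangeSet(new ExecuteChangeSetRequest
{
    ChangeSetName = "tdwv010001",
    StackName = stack_name
});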
public void ObjectSamples()
{
    {
        #region ListObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // List all objects
        ListObjectsRequest listRequest = new ListObjectsRequest
        {
            BucketName = "SampleBucket",
        };

        ListObjectsResponse listResponse;
        do
        {
            // Get a list of objects
            listResponse = client.ListObjects(listRequest);
            foreach (S3Object obj in listResponse.S3Objects)
            {
                Console.WriteLine("Object - " + obj.Key);
                Console.WriteLine(" Size - " + obj.Size);
                Console.WriteLine(" LastModified - " + obj.LastModified);
                Console.WriteLine(" Storage class - " + obj.StorageClass);
            }

            // Set the marker property
            listRequest.Marker = listResponse.NextMarker;
        } while (listResponse.IsTruncated);
        #endregion
    }
    {
        #region GetObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        using (StreamReader reader = new StreamReader(response.ResponseStream))
        {
            string contents = reader.ReadToEnd();
            Console.WriteLine("Object - " + response.Key);
            Console.WriteLine(" Version Id - " + response.VersionId);
            Console.WriteLine(" Contents - " + contents);
        }
        #endregion
    }
    {
        #region GetObject WriteResponseStreamToFile Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObject request
        GetObjectRequest request = new GetObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and remember to dispose of the response
        using (GetObjectResponse response = client.GetObject(request))
        {
            // Save object to local file
            response.WriteResponseStreamToFile("Item1.txt");
        }
        #endregion
    }
    {
        #region GetObjectMetadata Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetObjectMetadata request
        GetObjectMetadataRequest request = new GetObjectMetadataRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request and view the response
        GetObjectMetadataResponse response = client.GetObjectMetadata(request);
        Console.WriteLine("Content Length - " + response.ContentLength);
        Console.WriteLine("Content Type - " + response.Headers.ContentType);
        if (response.Expiration != null)
        {
            Console.WriteLine("Expiration Date - " + response.Expiration.ExpiryDate);
            Console.WriteLine("Expiration Rule Id - " + response.Expiration.RuleId);
        }
        #endregion
    }
    {
        #region PutObject Sample 1
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request with inline content
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            ContentBody = "This is sample content..."
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 2
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from a local file
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            FilePath = "contents.txt"
        };

        // Put object
        PutObjectResponse response = client.PutObject(request);
        #endregion
    }
    {
        #region PutObject Sample 3
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a PutObject request sourced from a stream
        PutObjectRequest request = new PutObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
        };

        using (FileStream stream = new FileStream("contents.txt", FileMode.Open))
        {
            request.InputStream = stream;

            // Put object
            PutObjectResponse response = client.PutObject(request);
        }
        #endregion
    }
    {
        #region DeleteObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObject request
        DeleteObjectRequest request = new DeleteObjectRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };

        // Issue request
        client.DeleteObject(request);
        #endregion
    }
    {
        #region DeleteObjects Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a DeleteObjects request
        DeleteObjectsRequest request = new DeleteObjectsRequest
        {
            BucketName = "SampleBucket",
            Objects = new List<KeyVersion>
            {
                new KeyVersion() { Key = "Item1" },
                // Versioned item
                new KeyVersion() { Key = "Item2", VersionId = "Rej8CiBxcZKVK81cLr39j27Y5FVXghDK" },
                // Item in subdirectory
                new KeyVersion() { Key = "Logs/error.txt" }
            }
        };

        try
        {
            // Issue request
            DeleteObjectsResponse response = client.DeleteObjects(request);
        }
        catch (DeleteObjectsException doe)
        {
            // Catch error and list error details
            DeleteObjectsResponse errorResponse = doe.Response;
            foreach (DeletedObject deletedObject in errorResponse.DeletedObjects)
            {
                Console.WriteLine("Deleted item " + deletedObject.Key);
            }
            foreach (DeleteError deleteError in errorResponse.DeleteErrors)
            {
                Console.WriteLine("Error deleting item " + deleteError.Key);
                Console.WriteLine(" Code - " + deleteError.Code);
                Console.WriteLine(" Message - " + deleteError.Message);
            }
        }
        #endregion
    }
    {
        #region CopyObject Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a CopyObject request
        CopyObjectRequest request = new CopyObjectRequest
        {
            SourceBucket = "SampleBucket",
            SourceKey = "Item1",
            DestinationBucket = "AnotherBucket",
            DestinationKey = "Copy1",
            CannedACL = S3CannedACL.PublicRead
        };

        // Issue request
        client.CopyObject(request);
        #endregion
    }
    {
        #region ListVersions Sample
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Turn versioning on for a bucket
        client.PutBucketVersioning(new PutBucketVersioningRequest
        {
            BucketName = "SampleBucket",
            VersioningConfig = new S3BucketVersioningConfig
            {
                Status = "Enabled" // valid values are "Enabled" and "Suspended"
            }
        });

        // Populate bucket with multiple items, each with multiple versions
        PopulateBucket(client, "SampleBucket");

        // Get versions
        ListVersionsRequest request = new ListVersionsRequest
        {
            BucketName = "SampleBucket"
        };

        // Make paged ListVersions calls
        ListVersionsResponse response;
        do
        {
            response = client.ListVersions(request);

            // View information about versions
            foreach (var version in response.Versions)
            {
                Console.WriteLine("Key = {0}, Version = {1}, IsLatest = {2}, LastModified = {3}, Size = {4}",
                    version.Key,
                    version.VersionId,
                    version.IsLatest,
                    version.LastModified,
                    version.Size);
            }

            request.KeyMarker = response.NextKeyMarker;
            request.VersionIdMarker = response.NextVersionIdMarker;
        } while (response.IsTruncated);
        #endregion
    }
    {
        #region Multipart Upload Sample
        int MB = (int)Math.Pow(2, 20);

        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Define input stream
        Stream inputStream = Create13MBDataStream();

        // Initiate multipart upload
        InitiateMultipartUploadRequest initRequest = new InitiateMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1"
        };
        InitiateMultipartUploadResponse initResponse = client.InitiateMultipartUpload(initRequest);

        // Upload part 1
        UploadPartRequest uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 1,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up1Response = client.UploadPart(uploadRequest);

        // Upload part 2
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 2,
            PartSize = 5 * MB,
            InputStream = inputStream
        };
        UploadPartResponse up2Response = client.UploadPart(uploadRequest);

        // Upload part 3 (final part, no PartSize: consumes the rest of the stream)
        uploadRequest = new UploadPartRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartNumber = 3,
            InputStream = inputStream
        };
        UploadPartResponse up3Response = client.UploadPart(uploadRequest);

        // List parts for current upload
        ListPartsRequest listPartRequest = new ListPartsRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId
        };
        ListPartsResponse listPartResponse = client.ListParts(listPartRequest);
        Debug.Assert(listPartResponse.Parts.Count == 3);

        // Complete the multipart upload
        CompleteMultipartUploadRequest compRequest = new CompleteMultipartUploadRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            UploadId = initResponse.UploadId,
            PartETags = new List<PartETag>
            {
                new PartETag { ETag = up1Response.ETag, PartNumber = 1 },
                new PartETag { ETag = up2Response.ETag, PartNumber = 2 },
                new PartETag { ETag = up3Response.ETag, PartNumber = 3 }
            }
        };
        CompleteMultipartUploadResponse compResponse = client.CompleteMultipartUpload(compRequest);
        #endregion
    }
}
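// The multipart sample above drives UploadPart by hand. For most workloads the SDK's
// high-level TransferUtility does the same job (part sizing, retries, completion) in a
// few lines. A minimal sketch, assuming the same "SampleBucket" and a local file:
public void TransferUtilityUploadSketch()
{
    AmazonS3Client client = new AmazonS3Client();
    var transferUtility = new Amazon.S3.Transfer.TransferUtility(client);

    // Switches to multipart automatically for large payloads
    transferUtility.Upload(new Amazon.S3.Transfer.TransferUtilityUploadRequest
    {
        BucketName = "SampleBucket",
        Key = "Item1",
        FilePath = "contents.txt",
        PartSize = 5 * (int)Math.Pow(2, 20) // 5 MB parts, matching the manual sample
    });
}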
static void CopyTemplatesToS3(EncryptionMaterials encryptionMaterials)
{
    //AmazonS3EncryptionClient s3Client = new AmazonS3EncryptionClient(encryptionMaterials);
    AmazonS3Client s3Client = new AmazonS3Client();

    try
    {
        TdwUtils.TearDownS3BucketByPrefix(s3Client, "tdwcftdev");
        AmazonS3Util.DeleteS3BucketWithObjects(s3Client, QSS3BucketName);
    }
    catch (Exception)
    {
        // Ignore teardown failures; the bucket may not exist yet
    }

    string bucket_Name = TdwUtils.CreateBucket(s3Client, QSS3BucketName);

    // Every template the stack needs, uploaded under the quick-start prefix
    string[] templatePaths =
    {
        TdwUtils.cfClassPathParentSubnet,        // Cross-stack communication, parent
        TdwUtils.cfClassPathChildSubnetProducer, // Cross-stack communication, first child
        TdwUtils.cfClassPathChildSubnet,         // Cross-stack communication, second child
        TdwUtils.cfClassPathApplication,         // Application template
        TdwUtils.cfClassPathConfigRules,         // Config rules
        TdwUtils.cfClassPathIam,                 // IAM template
        TdwUtils.cfClassPathKinesis,             // Kinesis
        TdwUtils.cfClassPathLogging,             // Logging template
        TdwUtils.cfClassPathBastion,             // Main bastion template
        TdwUtils.cfClassPathManagementVpc,       // Management VPC template
        TdwUtils.cfClassPathProductionVpc,       // Production VPC template
        TdwUtils.cfClassPathBastionChangeSet     // Change-set template for the main bastion
    };

    foreach (string classPath in templatePaths)
    {
        string dataPath = TdwUtils.bingPathToAppDir(classPath);
        byte[] dataBytes = TdwUtils.FileToArray(dataPath);
        PutObjectRequest request = new PutObjectRequest()
        {
            BucketName = QSS3BucketName,
            Key = QSS3KeyPrefix + classPath.Replace("tdw_cf_template\\", ""),
            InputStream = new MemoryStream(dataBytes)
        };
        PutObjectResponse response = s3Client.PutObject(request);
    }
}
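// A quick way to confirm the templates landed where the stacks expect them: list the
// bucket under the quick-start prefix. A sketch, assuming the same s3Client and the
// QSS3BucketName/QSS3KeyPrefix values used above:
static void VerifyTemplatesInS3(AmazonS3Client s3Client)
{
    ListObjectsRequest listRequest = new ListObjectsRequest
    {
        BucketName = QSS3BucketName,
        Prefix = QSS3KeyPrefix
    };
    ListObjectsResponse listResponse = s3Client.ListObjects(listRequest);
    foreach (S3Object obj in listResponse.S3Objects)
    {
        Console.WriteLine("template: {0} ({1} bytes)", obj.Key, obj.Size);
    }
}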
public void PresignedURLSamples()
{
    {
        #region GetPreSignedURL Sample 1
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetPreSignedUrl request
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            Expires = DateTime.UtcNow.AddMinutes(5)
        };

        // Get path for request
        string path = client.GetPreSignedURL(request);

        // Test by getting contents
        string contents = GetContents(path);
        #endregion
    }
    {
        #region GetPreSignedURL Sample 2
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetPreSignedUrl request with response header overrides
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            Expires = DateTime.UtcNow.AddMinutes(5)
        };
        request.ResponseHeaderOverrides.ContentType = "text/xml+zip";
        request.ResponseHeaderOverrides.ContentDisposition = "attachment; filename=dispName.pdf";
        request.ResponseHeaderOverrides.CacheControl = "No-cache";
        request.ResponseHeaderOverrides.ContentLanguage = "mi, en";
        request.ResponseHeaderOverrides.Expires = "Thu, 01 Dec 1994 16:00:00 GMT";
        request.ResponseHeaderOverrides.ContentEncoding = "x-gzip";

        // Get path for request
        string path = client.GetPreSignedURL(request);

        // Test by getting contents
        string contents = GetContents(path);
        #endregion
    }
    {
        #region GetPreSignedURL Sample 3
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetPreSignedUrl request with no key: the URL addresses the bucket
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest
        {
            BucketName = "SampleBucket",
            Expires = DateTime.UtcNow.AddMinutes(5)
        };

        // Get path for request
        string path = client.GetPreSignedURL(request);

        // Retrieve objects
        string allObjects = GetContents(path);
        #endregion
    }
    {
        #region GetPreSignedURL Sample 4
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetPreSignedUrl request with no bucket: the URL addresses the service
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest
        {
            Expires = DateTime.UtcNow.AddMinutes(5)
        };

        // Get path for request
        string path = client.GetPreSignedURL(request);

        // Retrieve buckets
        string allBuckets = GetContents(path);
        #endregion
    }
    {
        #region GetPreSignedURL Sample 5
        // Create a client
        AmazonS3Client client = new AmazonS3Client();

        // Create a GetPreSignedUrl request for an upload (PUT)
        GetPreSignedUrlRequest request = new GetPreSignedUrlRequest
        {
            BucketName = "SampleBucket",
            Key = "Item1",
            Verb = HttpVerb.PUT,
            Expires = DateTime.UtcNow.AddDays(10)
        };

        // Get path for request
        string path = client.GetPreSignedURL(request);

        // Prepare data
        byte[] data = UTF8Encoding.UTF8.GetBytes("Sample text.");

        // Configure request
        HttpWebRequest httpRequest = WebRequest.Create(path) as HttpWebRequest;
        httpRequest.Method = "PUT";
        httpRequest.ContentLength = data.Length;

        // Write data to stream
        Stream requestStream = httpRequest.GetRequestStream();
        requestStream.Write(data, 0, data.Length);
        requestStream.Close();

        // Issue request
        HttpWebResponse response = httpRequest.GetResponse() as HttpWebResponse;
        #endregion
    }
}
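// Sample 5 uses HttpWebRequest; on newer frameworks the same presigned PUT can be
// issued with HttpClient. A sketch, assuming a presigned "path" produced exactly as
// in Sample 5 above:
public void PresignedPutWithHttpClientSketch(string path)
{
    byte[] data = System.Text.Encoding.UTF8.GetBytes("Sample text.");
    using (var httpClient = new System.Net.Http.HttpClient())
    {
        // PUT the payload against the presigned URL; no AWS credentials are needed here,
        // since the signature is embedded in the URL itself
        var response = httpClient.PutAsync(path, new System.Net.Http.ByteArrayContent(data)).Result;
        Console.WriteLine("PUT status: {0}", response.StatusCode);
    }
}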
private void Example()
{
    // Simple example for S3...
    AmazonS3Client objAWSClient = new AmazonS3Client(Amazon.RegionEndpoint.USEast1);
    IFileServer server1 = new FileServerAmazonS3("/LocalFolder/", "http://localhostedurl.com", objAWSClient, "S3BucketName");
    IFileQuery file1 = new FileQuery("Desert.jpg", "uploads/images");
    server1.StoreImage("C:\\SomeFile.jpg", file1);

    // Cleanup
    if (server1.FileExistsInCDN(file1) || server1.FileExistsLocal(file1))
        server1.Delete(file1);

    // Simple example for Azure
    Microsoft.WindowsAzure.Storage.CloudStorageAccount objCDNClient = Microsoft.WindowsAzure.Storage.CloudStorageAccount.Parse(
        ConfigurationManager.ConnectionStrings["AzureStorageConnectionString"].ConnectionString);
    IFileServer server2 = new FileServerAzure("/LocalFolder/", "http://localhostedurl.com", objCDNClient, "AzureBucketName");
    IFileQuery file2 = new FileQuery("Jellyfish.jpg", "Images");
    server2.StoreImage("C:\\SomeFile.jpg", file2);

    // Cleanup
    if (server2.FileExistsInCDN(file2) || server2.FileExistsLocal(file2))
        server2.Delete(file2);

    // Look at all the ways an IFileQuery can be turned into useful paths. The file server setup
    // is designed so that every node can store its files in different disk paths, but a given
    // FileQuery will find the correct file at any node, regardless of their specific configurations.
    // The "Base" path is the portion that is common to all nodes, local and remote.
    IFileServer server3 = new FileServerAzure("/Uploads/", "http://www.testdomain.com", null, "AzureBucketName");
    IFileQuery file3 = new FileQuery("Awesome.jpg", "Images");
    string strURL, strLocalRelativePath, strPhysicalPath, strBasePath, strCDNPath;
    strURL = server3.GetLocalURL(file3);                        //http://www.testdomain.com/Uploads/Images/Awesome.jpg
    strPhysicalPath = server3.GetLocalDiskPath(file3);          //C:\\Users\\MyUser\\Documents\\General.CDN\\General.CDN.Tests\\bin\\Debug\\Uploads\\Images\\Awesome.jpg
    strLocalRelativePath = server3.GetLocalRelativePath(file3); //Uploads/Images/Awesome.jpg
    strBasePath = server3.GetBasePath(file3);                   //Images/Awesome.jpg
    strCDNPath = server3.GetCDNPath(file3, true);               //unittest/Images/Awesome.jpg
    var blnExistsLocal = server3.FileExistsLocal(file3);
    var enuExistsHTTP = server3.FileExistsLocal_HTTPCheck(file3);
    var blnExistsRemote = server3.FileExistsInCDN(file3);

    // Would you like to know if a file on your local node is up to date? Try this.
    // IsCurrentVersionOf uses MD5 hash matching for files less than 10MB;
    // if the file is larger, or MD5 is not available in the remote server, file byte size is used for comparison.
    var props1CDN = server3.GetFilePropertiesFromCDN(file3);
    var props1Local = server3.GetFilePropertiesLocal(file3);
    bool blnUpToDate = props1Local.IsCurrentVersionOf(props1CDN);
}
private void g_Static_ProcessingComplete(object sender, ProcessCompleteEventArgs e)
{
    Guid g = Guid.NewGuid();
    var context = GlobalHost.ConnectionManager.GetHubContext<ProgressHub>();
    context.Clients.Client(_progressId).changePercent("100", "Processing");

    // UPLOAD NEW SPRAY
    AmazonS3Client client = new AmazonS3Client("KEY", "SECRET", RegionEndpoint.USWest1); /* CHANGEME - Amazon S3 Key/Secret */
    PutObjectRequest request = new PutObjectRequest();
    request.PutObjectProgressEvent += request_Static_PutObjectProgressEvent;
    request.BucketName = _isLoggedIn ? BUCKET_REGISTERED_VTF : BUCKET_ANONYMOUS_VTF;
    request.Key = string.Format("{0}.vtf", g);
    request.InputStream = e.outputStream;
    request.CannedACL = S3CannedACL.PublicRead;
    request.AutoCloseStream = true;
    client.PutObject(request);

    // UPLOAD NEW PREVIEW
    NameValueCollection nvc = Request.Form;
    XDocument doc = XDocument.Parse(nvc["s3Response"]);
    var location = UploadToBlitline(g, doc, false);

    // SAVE TO DATABASE
    using (var db = new SprayContext())
    {
        User u = null;
        DateTime expires = new DateTime(9999, 12, 31);
        if (_isLoggedIn)
        {
            u = db.Users.FirstOrDefault(x => x.SteamId == _baseSteamId);
        }
        else
        {
            expires = DateTime.Now.AddDays(7);
        }

        // Make a new spray object
        Spray spray = new Spray()
        {
            Animated = e.Animated,
            DateAdded = DateTime.Now,
            DateExpires = expires,
            Fading = e.Fading,
            Id = g,
            NSFW = false,
            Safeness = Models.Safeness.SFW,
            PreviewImage = string.Format("https://{0}/{1}{2}",
                _isLoggedIn ? BUCKET_REGISTERED_GIF : BUCKET_ANONYMOUS_GIF,
                g,
                location.ToString().EndsWith(".gif") ? ".gif" : ".png"),
            Status = Status.ACTIVE,
            Saves = 0,
            Url = string.Format("https://{0}/{1}", request.BucketName, request.Key),
            Creator = u
        };

        // Tell the client to show the spray
        context.Clients.Client(_progressId).showImage(spray.PreviewImage, spray.Url, "/Create/VMT/" + g.ToString(), spray.Animated, "/Spray/" + g.ToString());

        db.Sprays.Add(spray);
        db.SaveChanges();
    }
}
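// The handler wired to PutObjectProgressEvent above is not shown here. A minimal
// sketch of what it could look like, assuming the SDK version implied by the code
// (which raises PutObjectProgressArgs with a PercentDone property) and the same
// _progressId field used above:
private void request_Static_PutObjectProgressEvent(object sender, PutObjectProgressArgs e)
{
    // Forward upload progress to the browser over SignalR
    var context = GlobalHost.ConnectionManager.GetHubContext<ProgressHub>();
    context.Clients.Client(_progressId).changePercent(e.PercentDone.ToString(), "Uploading");
}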