/// <summary>
/// Moves an S3 object from <paramref name="oldFileKey"/> to <paramref name="newFileKey"/>,
/// ensuring the destination folder exists, then makes the moved object public.
/// No-op when S3 usage is disabled or the source key is null/empty.
/// </summary>
/// <param name="oldFileKey">Key of the existing object to move.</param>
/// <param name="newFileKey">Destination key for the object.</param>
/// <param name="folderName">Destination folder key; created if it does not exist.</param>
/// <param name="overWrite">When true, any existing object at <paramref name="newFileKey"/> is deleted first.</param>
public static void Move(string oldFileKey, string newFileKey, string folderName, bool overWrite = false)
{
    if (!IsUseS3)
    {
        return;
    }
    if (string.IsNullOrEmpty(oldFileKey))
    {
        return;
    }
    // Clear the destination first so MoveTo does not collide with an existing object.
    if (overWrite)
    {
        Delete(newFileKey);
    }
    using (IAmazonS3 client = new AmazonS3Client(Region))
    {
        S3FileInfo currentObject = new S3FileInfo(client, Bucket, oldFileKey);
        S3DirectoryInfo destination = new S3DirectoryInfo(client, Bucket, folderName);
        if (!destination.Exists)
        {
            destination.Create();
        }
        // Fix: removed the unused local that captured MoveTo's return value.
        currentObject.MoveTo(Bucket, newFileKey);
    }
    MakePublic(newFileKey);
}
/// <summary>
/// Implementation of the ZephyrDirectory Create method in Amazon S3 Storage.
/// </summary>
/// <param name="failIfExists">Throws an error if the directory already exists.</param>
/// <param name="verbose">When true, logs a message after the directory is created.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
/// <returns>This AmazonS3ZephyrDirectory instance.</returns>
public override ZephyrDirectory Create(bool failIfExists = false, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    if (_client == null)
    {
        // Fix: dropped the pointless interpolation on a constant message.
        throw new Exception("AWSClient Not Set.");
    }
    if (this.Exists && failIfExists)
    {
        throw new Exception($"Directory [{FullName}] Already Exists.");
    }
    // S3DirectoryInfo expects the object key without the trailing slash.
    string key = ObjectKey;
    if (key.EndsWith("/"))
    {
        key = key.Substring(0, key.Length - 1);
    }
    S3DirectoryInfo dirInfo = new S3DirectoryInfo(_client.Client, BucketName, key);
    dirInfo.Create();
    if (verbose)
    {
        Logger.Log($"Directory [{FullName}] Was Created.", callbackLabel, callback);
    }
    return this;
}
/// <summary>
/// Loads the RSA key material from bin/keyxml.pk (if present), then builds an
/// S3 encryption client and ensures the PDF-documents bucket directory exists.
/// Initialization failures are logged to the console and swallowed (best-effort).
/// </summary>
public S3Storage()
{
    const string filename = "keyxml.pk";
    var path = WebServerPathUtils.GetPathTo(Path.Combine("bin", filename));
    var f = new FileInfo(path);
    if (f.Exists)
    {
        // Fix: the StreamReader was never disposed; stack the usings so both
        // the reader and the underlying stream are released deterministically.
        using (var file = f.OpenRead())
        using (var reader = new StreamReader(file))
        {
            var keyString = reader.ReadToEnd();
            _algorithm = RSA.Create();
            _algorithm.FromXmlString(keyString);
            var encryptionMaterials = new EncryptionMaterials(_algorithm);
            try
            {
                _client = new AmazonS3EncryptionClient(encryptionMaterials);
                var bucket = new S3DirectoryInfo(_client, PdfDocumentsBucketName);
                if (!bucket.Exists)
                {
                    bucket.Create();
                }
            }
            catch (Exception ex)
            {
                // Deliberate best-effort: the store stays unusable but construction succeeds.
                Console.WriteLine("Unable to initialize S3 client\n" + ex);
            }
        }
    }
}
/// <summary>
/// Creates the S3 folder for the given path (after normalizing it) in the configured bucket.
/// </summary>
/// <param name="path">Folder path; normalized via <c>CleanPath</c> before use.</param>
public void CreateFolder(string path)
{
    var cleanedPath = CleanPath(path);
    var folder = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, cleanedPath);
    folder.Create();
}
/// <summary>
/// Connects to S3, ensures the bucket's root directory exists, and creates
/// the client-specific subdirectory under it.
/// </summary>
/// <param name="specificFolder">Name of the subdirectory to create under the bucket root.</param>
public AWSStorageClient(string specificFolder)
{
    _storageClient = AWSClientFactory.CreateAmazonS3Client();
    var bucketRoot = new S3DirectoryInfo(_storageClient, BucketName);
    bucketRoot.Create();
    _subDirectory = bucketRoot.CreateSubdirectory(specificFolder);
}
/// <summary>
/// Connects to S3, ensures the bucket root exists, and prepares the main
/// working directory plus a fixed "AnalysisOutput" directory.
/// </summary>
/// <param name="directory">Name of the main subdirectory under the bucket root.</param>
public AWSStorageClientClient(string directory)
{
    _storageClient = AWSClientFactory.CreateAmazonS3Client();
    var bucketRoot = new S3DirectoryInfo(_storageClient, BucketName);
    bucketRoot.Create();
    _mainDirectory = bucketRoot.CreateSubdirectory(directory);
    _outputDirectory = bucketRoot.CreateSubdirectory("AnalysisOutput");
}
/// <summary>
/// Creates the folder at <paramref name="path"/> in the configured bucket.
/// </summary>
/// <param name="path">Folder key to create.</param>
/// <exception cref="InvalidOperationException">Thrown when the folder already exists.</exception>
public void CreateFolder(string path)
{
    if (IsFolderExits(path))
    {
        throw new InvalidOperationException("Directory " + path + " already exists");
    }
    new S3DirectoryInfo(_amazonS3, _bucketName, path).Create();
}
/// <summary>
/// Resolves the S3 folder for the given file model's document type,
/// creating the folder on demand, and returns its path.
/// </summary>
/// <param name="filemodel">Model whose <c>FileDocType</c> determines the folder.</param>
/// <returns>The absolute folder path used in S3.</returns>
public override string GetFolderPath(NonSecureFileModel filemodel)
{
    string folder = CreateAbsoultePath(filemodel.FileDocType);
    var directory = new S3DirectoryInfo(s3Client, bucketName, folder);
    if (!directory.Exists)
    {
        directory.Create();
    }
    return folder;
}
/// <summary>
/// Creates the folder at <paramref name="path"/> unless it already exists.
/// </summary>
/// <param name="path">Folder key to create.</param>
/// <returns>False when the folder already existed; true after creating it.</returns>
public bool TryCreateFolder(string path)
{
    if (IsFolderExits(path))
    {
        return false;
    }
    new S3DirectoryInfo(_amazonS3, _bucketName, path).Create();
    return true;
}
/// <summary>
/// Attempts to create the folder at the normalized path in the configured bucket.
/// </summary>
/// <param name="path">Folder path; normalized via <c>CleanPath</c> before use.</param>
/// <returns>True on success; false if any exception occurred during creation.</returns>
public bool TryCreateFolder(string path)
{
    try
    {
        var cleanedPath = CleanPath(path);
        var folder = new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, cleanedPath);
        folder.Create();
        return true;
    }
    catch
    {
        // Try-pattern: report failure instead of propagating.
        return false;
    }
}
/// <summary>
/// Creates an S3 "directory" for the given key name, resolved against this
/// instance's base key via <c>ToFullS3KeyName</c>.
/// </summary>
/// <param name="keyname">Relative key name to create under <c>KeyName</c>.</param>
/// <exception cref="Exception">Wraps any <see cref="AmazonS3Exception"/> raised by the SDK.</exception>
public void CreateKeyName(string keyname)
{
    try
    {
        using (var client = new AmazonS3Client(_awsAccessKey, _awsSecretAccessKey))
        {
            S3DirectoryInfo directory = new S3DirectoryInfo(client, this.BucketName, keyname.ToFullS3KeyName(this.KeyName));
            directory.Create();
        }
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        // Fix: preserve the S3 exception as InnerException so the original
        // stack trace and error details are not lost on rethrow.
        throw new Exception($"Amazon Exception {amazonS3Exception}", amazonS3Exception);
    }
}
/// <summary>
/// Gets information of directory. If it does not exist, it will create it.
/// </summary>
/// <param name="s3Client">Client used to talk to S3.</param>
/// <param name="s3Directory">Directory key within the bucket.</param>
/// <param name="bucketName">Target bucket name.</param>
/// <param name="cancellationToken">Checked once before any S3 call.</param>
/// <returns>S3DirectoryInfo for an existing (possibly just-created) directory.</returns>
public static S3DirectoryInfo GetS3Directory(
    IAmazonS3 s3Client,
    string s3Directory,
    string bucketName,
    CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    var directoryInfo = new S3DirectoryInfo(s3Client, bucketName, s3Directory);
    if (!directoryInfo.Exists)
    {
        directoryInfo.Create();
    }
    return directoryInfo;
}
/// <summary>
/// Ensures the given folder exists in the configured bucket.
/// No-op when S3 usage is disabled.
/// </summary>
/// <param name="folderName">Folder key to create if missing.</param>
public static void CreateDirectory(string folderName)
{
    if (!IsUseS3)
    {
        return;
    }
    using (IAmazonS3 client = new AmazonS3Client(Region))
    {
        var folder = new S3DirectoryInfo(client, Bucket, folderName);
        if (!folder.Exists)
        {
            folder.Create();
        }
    }
}
/// <summary>
/// Test setup: recreates the test directory in S3 from a clean slate and
/// builds the target/source fixtures.
/// </summary>
public void Init()
{
    var client = new AmazonS3Client(RegionEndpoint.USEast1);
    _s3DirectoryInfo = new S3DirectoryInfo(client, _testDirectory);
    // Start from scratch: wipe any leftovers from a previous run.
    if (_s3DirectoryInfo.Exists)
    {
        _s3DirectoryInfo.Delete(true);
    }
    _s3DirectoryInfo.Create();
    _targetDirectory = new S3DirectoryObject(client, _testDirectory);
    _sourceObject = DirectoryHelper.CreateFullDirectory();
}
/// <summary>
/// Copies (or moves, when <paramref name="moveFiles"/> is true) all files from a
/// source bucket/key to a destination bucket/key, optionally deleting the
/// destination folder first. Errors are captured into the returned
/// <see cref="OperationResponse"/> rather than thrown.
/// </summary>
/// <param name="sourceBucket">Bucket containing the source folder.</param>
/// <param name="sourceKey">Key of the source folder.</param>
/// <param name="destinationBucket">Bucket receiving the files.</param>
/// <param name="destinationKey">Key of the destination folder.</param>
/// <param name="moveFiles">When true, the source folder is deleted after transfer.</param>
/// <param name="overwriteDestinationFolder">When true, the destination folder is deleted before transfer.</param>
/// <param name="region">AWS region endpoint to use.</param>
/// <returns>Response with StatusCode OK on success, InternalServerError plus a message on failure.</returns>
public OperationResponse CopyFiles(string sourceBucket, string sourceKey, string destinationBucket, string destinationKey, bool moveFiles, bool overwriteDestinationFolder, RegionEndpoint region)
{
    var operationResponse = new OperationResponse();
    try
    {
        // Clear the destination first so the transfer lands in an empty folder.
        // NOTE(review): this runs BEFORE IsValid; an invalid request can still
        // delete the destination — confirm that ordering is intended.
        if (overwriteDestinationFolder)
        {
            operationResponse = this.DeleteFolder(destinationBucket, destinationKey, region);
        }
        this.IsValid(_awsAccessKey, _awsSecretAccessKey, sourceBucket, sourceKey, destinationBucket, destinationKey, region);
        using (var client = new AmazonS3Client(_awsAccessKey, _awsSecretAccessKey, region))
        {
            S3DirectoryInfo origin = new S3DirectoryInfo(client, sourceBucket, sourceKey.ToSlashesFileSystem());
            S3DirectoryInfo target = new S3DirectoryInfo(client, destinationBucket, destinationKey.ToSlashesFileSystem());
            target.Create();
            // Transfers all files in parallel; when moveFiles is set the
            // source folder is removed afterwards to complete the "move".
            this.ParallelTransferring(client, origin, target, moveFiles);
            if (moveFiles)
            {
                this.DeleteFolder(sourceBucket, sourceKey.ToSlashesFileSystem(), region);
            }
        }
        operationResponse.StatusCode = System.Net.HttpStatusCode.OK;
    }
    catch (AmazonS3Exception amazonS3Exception)
    {
        // Credential problems get a friendlier message; everything else
        // surfaces the SDK's own message.
        if (amazonS3Exception.ErrorCode != null && (amazonS3Exception.ErrorCode.Equals("InvalidAccessKeyId") || amazonS3Exception.ErrorCode.Equals("InvalidSecurity")))
        {
            operationResponse.Message = "Please check the provided AWS Credentials. If you haven't signed up for Amazon S3, please visit http://aws.amazon.com/s3";
            operationResponse.StatusCode = System.Net.HttpStatusCode.InternalServerError;
        }
        else
        {
            operationResponse.Message = amazonS3Exception.Message;
            operationResponse.StatusCode = System.Net.HttpStatusCode.InternalServerError;
        }
    }
    catch (Exception oException)
    {
        operationResponse.Message = oException.Message;
        operationResponse.StatusCode = System.Net.HttpStatusCode.InternalServerError;
    }
    return (operationResponse);
}
/// <summary>
/// Renames this S3 "directory" by moving all of its files into a directory
/// whose key has the final <c>Name</c> segment replaced with <paramref name="newName"/>,
/// then deleting the old directory.
/// NOTE(review): only files returned by GetFiles are moved — nested
/// subdirectories are not transferred; confirm callers never rename
/// directories with children.
/// </summary>
/// <param name="newName">New name for the directory's final segment.</param>
public void Rename(string newName)
{
    // Fix: string.Replace substituted EVERY occurrence of Name in the key,
    // corrupting keys where the segment name also appears earlier in the
    // path (e.g. "a/a/x" renamed from "a"). Replace only the last occurrence.
    int segmentStart = _key.LastIndexOf(Name, StringComparison.Ordinal);
    var newKey = segmentStart < 0
        ? _key
        : _key.Substring(0, segmentStart) + newName + _key.Substring(segmentStart + Name.Length);
    var newDirectory = new S3DirectoryInfo(_s3Client, _bucketName, newKey);
    if (!newDirectory.Exists)
    {
        newDirectory.Create();
    }
    foreach (var s3FileInfo in _s3DirectoryInfo.GetFiles())
    {
        s3FileInfo.MoveTo(newDirectory);
    }
    _s3DirectoryInfo.Delete();
    _s3DirectoryInfo = newDirectory;
}
/// <summary>
/// Connects to S3, ensures the bucket root and the client-specific
/// subdirectory exist, then derives the next date to process from the
/// newest date-prefixed file name in that subdirectory.
/// NOTE(review): file names are assumed to start with a parseable date
/// followed by a '.' — confirm against the writer side.
/// </summary>
/// <param name="specificFolder">Name of the client's subdirectory under the bucket root.</param>
public AWSStorageClient(string specificFolder)
{
    _apiClientName = specificFolder;
    _storageClient = AWSClientFactory.CreateAmazonS3Client();
    _rootDirectory = new S3DirectoryInfo(_storageClient, BucketName);
    _rootDirectory.Create();
    // Get the correct subdirectory for this API client.
    _subDirectory = _rootDirectory.CreateSubdirectory(_apiClientName);
    var fileNames = _subDirectory.EnumerateFiles().Select(file => file.Name).ToArray();
    // Newest parsed date + 1 day; files that don't parse count as MinValue.
    _latestDate = fileNames.Length > 0
        ? fileNames
          .Max(
              fileName =>
              {
                  int index = fileName.IndexOf('.');
                  // Fix: a name without '.' made Substring throw on index -1.
                  if (index < 0)
                  {
                      return DateTime.MinValue;
                  }
                  DateTime date;
                  return DateTime.TryParse(fileName.Substring(0, index), out date) ? date : DateTime.MinValue;
              }).AddDays(1)
        : _minDate;
}
/// <summary>
/// Runs the full experiment script from the UI: reads configuration from the
/// form controls, loads the dataset from disk, optionally applies PCA, builds
/// the per-run result directories (local or S3), and executes an Analizer per
/// record configuration. The button turns green when all runs complete.
/// </summary>
private void btnScript_Click(object sender, EventArgs e)
{
    set2Config();
    u_config.printConfig(@"C:\Wavelets decomposition\config.txt", null);
    // Create a client with long (3 h) timeouts for large transfers.
    AmazonS3Config confisS3 = new AmazonS3Config { ProxyHost = null };
    TimeSpan timeOUT = new TimeSpan(3, 0, 0);
    confisS3.ReadWriteTimeout = timeOUT;
    confisS3.Timeout = timeOUT;
    AmazonS3Client client = new AmazonS3Client(confisS3);
    // Pull all run flags from the checkboxes.
    UseS3 = UseS3CB.Checked;
    rumPrallel = rumPrallelCB.Checked;
    runBoosting = runBoostingCB.Checked;
    runProoning = runProoningCB.Checked;
    runBoostingProoning = runBoostingProoningCB.Checked;
    runRFProoning = runRFProoningCB.Checked;
    runRf = runRfCB.Checked;
    runBoostingLearningRate = runBoostingLearningRateCB.Checked;
    bucketName = bucketTB.Text;
    string results_path = @ResultsTB.Text;
    string db_path = @DBTB.Text + "\\"; //@"C:\Users\Administrator\Dropbox\ADA\ada_valid\"; //"D:\\Phd\\Shai\\code\\tests\\helix tests\\noise_5\\noise_5\\"; // "C:\\reasearch\\tests\\lena\\"; //get dir
    MainFolderName = results_path;
    // Ensure the results root exists, locally or in S3.
    if (!UseS3)
    {
        if (!Directory.Exists(MainFolderName))
        {
            Directory.CreateDirectory(MainFolderName);
        }
    }
    if (UseS3)
    {
        S3DirectoryInfo s3results_path = new S3DirectoryInfo(client, bucketName, results_path);
        if (!s3results_path.Exists)
        {
            s3results_path.Create();
        }
        //set archive path
        //S3DirectoryInfo s3archive_path = new S3DirectoryInfo(client, bucketName, results_path + "\\archive");
        //if (!s3archive_path.Exists)
        //    s3archive_path.Create();
    }
    //READ DATA
    DB db = new DB();
    db.training_dt = db.getDataTable(db_path + "trainingData.txt");
    db.testing_dt = db.getDataTable(db_path + "testingData.txt");
    db.validation_dt = db.getDataTable(db_path + "ValidData.txt");
    db.training_label = db.getDataTable(db_path + "trainingLabel.txt");
    db.testing_label = db.getDataTable(db_path + "testingLabel.txt");
    db.validation_label = db.getDataTable(db_path + "ValidLabel.txt");
    //db.training_label = db.getDataTable(db_path + "trainingLabel" + t.ToString() + ".txt");
    upper_label = db.training_label.Max();
    lower_label = db.training_label.Min();
    // Truncate every split to the first trainingPercent rows.
    // NOTE(review): testing/validation are truncated by the TRAINING row count,
    // and validation_dt is reassigned from training_dt — looks suspicious;
    // confirm this aliasing is intentional.
    double trainingPercent = double.Parse(trainingPercentTB.Text); // 0.02;
    long rowToRemoveFrom = Convert.ToInt64(db.training_dt.Count() * trainingPercent);
    db.training_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.training_label = db.training_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_dt = db.testing_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_label = db.testing_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_label = db.validation_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    ////REDUCE DIM, GLOBAL PCA
    if (usePCA.Checked)
    {
        DimReduction dimreduction = new DimReduction(db.training_dt);
        db.PCAtraining_dt = dimreduction.getGlobalPca(db.training_dt);
        db.PCAtesting_dt = dimreduction.getGlobalPca(db.testing_dt);
        db.PCAvalidation_dt = dimreduction.getGlobalPca(db.validation_dt);
        //printtable(db.PCAtraining_dt, MainFolderName + "\\PCAtraining_dt.txt");//dbg
        //printtable(db.PCAtesting_dt, MainFolderName + "\\PCAtesting_dt.txt");//dbg
    }
    else
    {
        //de-activate pca for dbg: pass the raw tables through unchanged
        db.PCAtraining_dt = db.training_dt;
        db.PCAtesting_dt = db.testing_dt;
        db.PCAvalidation_dt = db.validation_dt;
    }
    db.PCAtraining_GridIndex_dt = new long[db.PCAtraining_dt.Count()][];
    for (int i = 0; i < db.PCAtraining_dt.Count(); i++)
    {
        db.PCAtraining_GridIndex_dt[i] = new long[db.PCAtraining_dt[i].Count()];
    }
    //BOUNDING BOX AND MAIN GRID
    boundingBox = db.getboundingBox(db.PCAtraining_dt);
    MainGrid = db.getMainGrid(db.PCAtraining_dt, boundingBox, ref db.PCAtraining_GridIndex_dt);
    //READ CONFIG
    methodConfig mc = new methodConfig(true);
    int Nloops = int.Parse(NloopsTB.Text) - 1;
    int Kfolds = 0;
    // K-fold cross-validation overrides the loop count when the field parses.
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        Nloops = Kfolds - 1;
    }
    for (int k = 0; k < Nloops; k++)
    {
        mc.boostlamda_0.Add(3.8); // - create variant in number of pixels
    }
    //mc.boostlamda_0.Add(1500);// - create variant in number of pixels
    //mc.boostlamda_0.Add(2500);// - create variant in number of pixels
    //mc.boostlamda_0.Add(3000);// - create variant in number of pixels
    mc.generateRecordConfigArr();
    // Populate each record configuration from the form fields; evaluateString
    // lets a field vary per run index k.
    for (int k = 0; k < mc.recArr.Count(); k++)
    {
        mc.recArr[k].dim = NfeaturesTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesTB.Text, k));
        mc.recArr[k].approxThresh = double.Parse(evaluateString(approxThreshTB.Text, k)); // 0.1;
        mc.recArr[k].partitionErrType = int.Parse(evaluateString(partitionTypeTB.Text, k)); //2;
        mc.recArr[k].minWaveSize = int.Parse(evaluateString(minNodeSizeTB.Text, k)); //1;//CHANGE AFTER DBG
        mc.recArr[k].hopping_size = int.Parse(evaluateString(waveletsSkipEstimationTB.Text, k)); //25;// 10 + 5 * (k + 1);// +5 * (k % 10);// 1;//25;
        mc.recArr[k].test_error_size = double.Parse(evaluateString(waveletsPercentEstimationTB.Text, k)); // +0.05 * (k % 10);// 1;// 0.1;//percent of waves to check
        mc.recArr[k].NskipsinKfunc = double.Parse(evaluateString(boostingKfuncPercentTB.Text, k)); // 0.0025;
        mc.recArr[k].rfBaggingPercent = double.Parse(evaluateString(bagginPercentTB.Text, k)); // 0.6;
        mc.recArr[k].rfNum = int.Parse(evaluateString(NrfTB.Text, k)); // k + 1;//10 + k*10;// 100 / (k + 46) * 2;// int.Parse(Math.Pow(10, k + 1).ToString());
        mc.recArr[k].boostNum = int.Parse(evaluateString(NboostTB.Text, k)); // 10;
        mc.recArr[k].boostProoning_0 = int.Parse(evaluateString(NfirstPruninginBoostingTB.Text, k)); //13
        mc.recArr[k].boostlamda_0 = double.Parse(evaluateString(boostingLamda0TB.Text, k)); // 0.01 - (k + 1) * 0.001; //0.05;// 0.0801 + k * 0.001;// Math.Pow(0.1, k);// 0.22 + k*0.005;
        mc.recArr[k].NwaveletsBoosting = int.Parse(evaluateString(NfirstwaveletsBoostingTB.Text, k)); // 4;// k + 1;
        //mc.recArr[k].learningRate = 0;// 0.01;
        mc.recArr[k].boostNumLearningRate = int.Parse(evaluateString(NboostingLearningRateTB.Text, k)); // 55;// 18;
        mc.recArr[k].percent_training_db = trainingPercent;
        mc.recArr[k].BoundLevel = int.Parse(evaluateString(boundLevelTB.Text, k)); //1024;
        mc.recArr[k].NDimsinRF = NfeaturesrfTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesrfTB.Text, k));
        mc.recArr[k].split_type = int.Parse(evaluateString(splitTypeTB.Text, k)); //0
        mc.recArr[k].NormLPType = int.Parse(evaluateString(errTypeEstimationTB.Text, k));
        mc.recArr[k].RFpruningTestRange[1] = int.Parse(evaluateString(RFpruningEstimationRange1TB.Text, k)); // 12;// k + 9;
        mc.recArr[k].boundDepthTree = int.Parse(evaluateString(boundDepthTB.Text, k)); //1024;
        mc.recArr[k].CrossValidFold = k;
        // 2m0rr0w2 save labels dim in config
        mc.recArr[k].labelDim = db.training_label[0].Count();
        //mc.recArr[k].boostNum = t ;// tmp to delete !!!!!!!
        //mc.recArr[k].RFwaveletsTestRange[0] = 25;
        //mc.recArr[k].RFwaveletsTestRange[1] = 50;
    }
    //create dirs — one result folder per record config, local or S3.
    foreach (recordConfig t in mc.recArr)
    {
        if (!UseS3 && !Directory.Exists(MainFolderName + "\\" + t.getShortName()))
        {
            Directory.CreateDirectory(MainFolderName + "\\" + t.getShortName());
            StreamWriter sw = new StreamWriter(MainFolderName + "\\" + t.getShortName() + "\\record_properties.txt", false);
            sw.WriteLine(t.getFullName());
            sw.Close();
            u_config.printConfig(MainFolderName + "\\config.txt", null);
        }
        if (!UseS3)
        {
            continue;
        }
        S3DirectoryInfo s3results_path_with_folders = new S3DirectoryInfo(client, bucketName, results_path + "\\" + t.getShortName());
        if (!s3results_path_with_folders.Exists)
        {
            s3results_path_with_folders.Create();
            S3FileInfo outFile = s3results_path_with_folders.GetFile("record_properties.txt");
            StreamWriter sw = new StreamWriter(outFile.OpenWrite());
            sw.WriteLine(t.getFullName());
            sw.Close();
            S3FileInfo configFile = s3results_path_with_folders.GetFile("config.txt");
            u_config.printConfig("", configFile);
        }
    }
    //SET ID ARRAY LIST
    List<int> trainingID = Enumerable.Range(0, db.PCAtraining_dt.Count()).ToList();
    List<int> testingID = Enumerable.Range(0, db.PCAtesting_dt.Count()).ToList();
    //cross validation — shuffle with a FIXED seed (2) for reproducibility.
    List<List<int>> trainingFoldId = new List<List<int>>();
    List<List<int>> testingFoldId = new List<List<int>>();
    Random ran = new Random(2);
    List<int> training_rand = trainingID.OrderBy(x => ran.Next()).ToList().GetRange(0, trainingID.Count); //THE LARGEST GROUP IS TRAINING
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        createCrossValid(Kfolds, training_rand, trainingFoldId, testingFoldId);
    }
    //bounding intervals: BB[0] is all zeros, BB[1] holds the last grid index per dim.
    int[][] BB = new int[2][];
    BB[0] = new int[boundingBox[0].Count()];
    BB[1] = new int[boundingBox[0].Count()];
    for (int i = 0; i < boundingBox[0].Count(); i++)
    {
        BB[1][i] = MainGrid[i].Count() - 1; //set last index in each dim
    }
    // Run one Analizer per record configuration.
    for (int i = 0; i < mc.recArr.Count; i++)
    {
        Analizer Analizer = new Analizer(MainFolderName + "\\" + mc.recArr[i].getShortName(), MainGrid, db, mc.recArr[i]);
        if (!croosValidCB.Checked)
        {
            Analizer.analize(trainingID, testingID, BB);
        }
        else
        {
            Analizer.analize(trainingFoldId[i], testingFoldId[i], BB); //cross validation
        }
    }
    // Signal completion in the UI.
    btnScript.BackColor = Color.Green;
}
/// <summary>
/// Sample program: creates the bucket, writes a README at its root, writes a
/// "wiki" article and a "licenses" file, prints the resulting directory tree
/// and the wiki contents, then optionally deletes everything.
/// </summary>
private static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        using (client = new AmazonS3Client(new AmazonS3Config() { MaxErrorRetry = 2, ThrottleRetries = true }))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();
            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");
            // Create a directory called wiki and write a file to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("wiki");
            S3FileInfo codeFile = codeDir.GetFile("Phantasmagoria.txt");
            using (StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("Phantasmagoria (About this sound American pronunciation (help·info), also fantasmagorie, fantasmagoria) was a form of horror theatre that ");
                writer.WriteLine("(among other techniques) used one or more magic lanterns to project frightening images such as skeletons, demons, and ");
                writer.WriteLine("ghosts onto walls, smoke, or semi-transparent screens, typically using rear projection to keep the lantern out of sight. Mobile or ");
                writer.WriteLine("portable projectors were used, allowing the projected image to move and change size on the screen, and multiple projecting ");
                writer.WriteLine("devices allowed for quick switching of different images. In many shows the use of spooky decoration, total darkness, sound ");
                writer.WriteLine("effects, (auto-)suggestive verbal presentation and sound effects were also key elements. Some shows added all kinds of ");
                writer.WriteLine("sensory stimulation, including smells and electric shocks. Even required fasting, fatigue (late shows) and drugs have been ");
                writer.WriteLine("mentioned as methods of making sure spectators would be more convinced of what they saw. The shows started under the ");
                writer.WriteLine("guise of actual séances in Germany in the late 18th century, and gained popularity through most of Europe (including Britain) ");
                writer.WriteLine("throughout the 19th century.");
            }
            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");
            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");
            // Echo each wiki file's content to the console.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }
            // Deletes all the files and then the bucket.
            if (deleteAtEnd)
            {
                rootDirectory.Delete(true);
            }
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Attempts to create the folder at the normalized path in the configured bucket.
/// </summary>
/// <param name="path">Folder path; normalized via <c>CleanPath</c> before use.</param>
/// <returns>True on success; false if any exception occurred during creation.</returns>
public bool TryCreateFolder(string path)
{
    try
    {
        var normalized = CleanPath(path);
        new S3DirectoryInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, normalized).Create();
        return true;
    }
    catch
    {
        // Try-pattern: report failure instead of propagating.
        return false;
    }
}
/// <summary>Creates the underlying S3 directory.</summary>
public void Create() => _s3DirectoryInfo.Create();
/// <summary>
/// Sample program: creates the bucket, writes a README at its root, mirrors a
/// local "code" directory with a sample Program.cs, writes a "licenses" file,
/// prints the directory tree and code contents, then deletes everything.
/// </summary>
static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();
            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");
            DirectoryInfo localRoot = new DirectoryInfo(@"C:\");
            DirectoryInfo localCode = localRoot.CreateSubdirectory("code");
            // Create a directory called code and write a file to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("code");
            S3FileInfo codeFile = codeDir.GetFile("Program.cs");
            using (StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("namespace S3FileSystem_Sample");
                writer.WriteLine("{");
                writer.WriteLine("    class Program");
                writer.WriteLine("    {");
                writer.WriteLine("        static void Main(string[] args)");
                writer.WriteLine("        {");
                writer.WriteLine("            Console.WriteLine(\"Hello World\");");
                writer.WriteLine("        }");
                writer.WriteLine("    }");
                writer.WriteLine("}");
            }
            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");
            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");
            // Echo each code file's content to the console.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }
            // Deletes all the files and then the bucket.
            rootDirectory.Delete(true);
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Sample program (near-duplicate of the other S3FileSystem sample in this
/// file — consider consolidating): creates the bucket, writes a README,
/// mirrors a local "code" directory with a sample Program.cs, writes a
/// "licenses" file, prints the tree and code contents, then deletes everything.
/// </summary>
static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();
            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");
            DirectoryInfo localRoot = new DirectoryInfo(@"C:\");
            DirectoryInfo localCode = localRoot.CreateSubdirectory("code");
            // Create a directory called code and write a file to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("code");
            S3FileInfo codeFile = codeDir.GetFile("Program.cs");
            using(StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("namespace S3FileSystem_Sample");
                writer.WriteLine("{");
                writer.WriteLine("    class Program");
                writer.WriteLine("    {");
                writer.WriteLine("        static void Main(string[] args)");
                writer.WriteLine("        {");
                writer.WriteLine("            Console.WriteLine(\"Hello World\");");
                writer.WriteLine("        }");
                writer.WriteLine("    }");
                writer.WriteLine("}");
            }
            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");
            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");
            // Echo each code file's content to the console.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }
            // Deletes all the files and then the bucket.
            rootDirectory.Delete(true);
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}