//************************************** PRINT FUNCTIONS WITH OUT REFERENCES ****************************************************
/// <summary>
/// Writes the test-error summary (l2/l1 errors, miss count, success rate) to a file,
/// either on S3 (when Form1.UseS3) or the local filesystem.
/// </summary>
/// <param name="filename">Destination path (S3 key or local path).</param>
/// <param name="l2">L2 estimation error.</param>
/// <param name="l1">L1 estimation error.</param>
/// <param name="l0">Number of mislabeled test points.</param>
/// <param name="testSize">Total number of test points (success rate is NaN when 0).</param>
public static void printErrorsToFile(string filename, double l2, double l1, double l0, double testSize)
{
    StreamWriter writer;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        writer = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        writer = new StreamWriter(filename, false);
    }
    // BUG FIX: wrap in using so the writer is disposed even if a WriteLine throws
    // (an S3-backed stream is only committed when it is closed).
    using (writer)
    {
        writer.WriteLine("l2 estimation error: " + l2.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("l1 estimation error: " + l1.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("num of miss labels: " + l0.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("num of tests: " + testSize.ToString(CultureInfo.InvariantCulture));
        // BUG FIX: corrected "sucess" typo in the output label.
        writer.WriteLine("success rate : " + (1 - (l0 / testSize)).ToString(CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Writes a jagged table (one List&lt;int&gt; per row) to a file, one space-separated
/// row per line, on S3 (when Form1.UseS3) or locally.
/// </summary>
/// <param name="table">Rows to print; each value is followed by a single space.</param>
/// <param name="filename">Destination path (S3 key or local path).</param>
public static void printtable(List<int>[] table, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // BUG FIX: using-block disposes the writer even when a write throws.
    using (sw)
    {
        // Use Length/foreach instead of LINQ Count() on an array/List (CA1829).
        foreach (List<int> row in table)
        {
            var line = "";
            foreach (int value in row)
            {
                // Trailing space after each value is preserved from the original format.
                line += value.ToString() + " ";
            }
            sw.WriteLine(line);
        }
    }
}
/// <summary>
/// Move feature with source delete and option for overwrite.
/// </summary>
/// <param name="file">S3 file to move.</param>
/// <param name="path">Local destination path.</param>
/// <param name="overwrite">Overwrite the local file if it already exists.</param>
/// <returns>FileInfo for the local copy.</returns>
public static FileInfo MoveToLocal(this S3FileInfo file, string path, bool overwrite)
{
    // Copy first, then remove the source — i.e. a move implemented as copy + delete.
    FileInfo copied = file.CopyToLocal(path, overwrite);
    file.Delete();
    return copied;
}
/// <summary>
/// Writes each value of a list on its own line, to S3 (when Form1.UseS3) or a local file.
/// </summary>
/// <param name="lst">Values to print, one per line.</param>
/// <param name="filename">Destination path (S3 key or local path).</param>
public static void printList(List<double> lst, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // BUG FIX: using-block disposes the writer even when a write throws;
    // foreach replaces the index loop with LINQ Count() (CA1829).
    using (sw)
    {
        foreach (double value in lst)
        {
            sw.WriteLine(value);
        }
    }
}
/// <summary>
/// Implementation of the ZephyrFile Delete method in Amazon S3 Storage.
/// </summary>
/// <param name="stopOnError">Throw an exception when an error occurs.</param>
/// <param name="verbose">Log details of file deleted.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
public override void Delete(bool stopOnError = true, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    try
    {
        if (_client == null)
        {
            throw new Exception($"AWSClient Not Set.");
        }

        var target = new S3FileInfo(_client.Client, BucketName, ObjectKey);
        if (target.Exists)
        {
            // An open stream must be closed before the underlying object can be deleted.
            if (IsOpen)
            {
                Close();
            }
            target.Delete();
        }

        // Logged regardless of whether the object actually existed.
        if (verbose)
        {
            Logger.Log($"File [{FullName}] Was Deleted.", callbackLabel, callback);
        }
    }
    catch (Exception e)
    {
        Logger.Log(e.Message, callbackLabel, callback);
        if (stopOnError)
        {
            throw;
        }
    }
}
/// <summary>
/// Uploads a new file into the given directory via a PutObject request.
/// </summary>
/// <param name="directory">Must be an AWSStorageDirectory created by this library.</param>
/// <param name="name">File name within the directory.</param>
/// <param name="file">Stream with the file content to upload.</param>
/// <exception cref="ArgumentException">The file already exists, or the directory is not an AWSStorageDirectory.</exception>
public async Task CreateFile(IStorageDirectory directory, string name, Stream file)
{
    // Guard clauses replace the original nested if/else; pattern match replaces `as` + null check.
    if (!(directory is AWSStorageDirectory s3Directory))
    {
        throw new ArgumentException("directory parameter must be created through the AWS library");
    }

    S3FileInfo s3File = s3Directory.DirectoryInfo.GetFile(name);
    if (s3File.Exists)
    {
        throw new ArgumentException("File already exists");
    }

    string fullName = s3Directory.DirectoryInfo.GetDirectoryPath() + name;
    var request = new PutObjectRequest
    {
        BucketName = s3File.Directory.Bucket.Name,
        Key = fullName,
        InputStream = file
    };
    // Wrap the legacy Begin/End APM pair in a Task; the response was never used, so don't keep it.
    await Task.Factory.FromAsync(
        _client.BeginPutObject(request, null, null),
        (result) => _client.EndPutObject(result));
}
/// <summary>
/// Implementation of the ZephyrFile Create method in Amazon S3 Storage.
/// </summary>
/// <param name="overwrite">Will overwrite the file if it already exists.</param>
/// <param name="verbose">Log details of the file created.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
/// <returns>An instance of a AmazonS3ZephyrFile.</returns>
public override ZephyrFile Create(bool overwrite = true, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    try
    {
        if (this.Exists && !overwrite)
        {
            throw new Exception($"File [{this.FullName}] Already Exists.");
        }
        if (_client == null)
        {
            throw new Exception($"AWSClient Not Set.");
        }

        var s3File = new S3FileInfo(_client.Client, BucketName, ObjectKey);
        this.Stream = s3File.Create();

        // File isn't written to S3 Bucket Until The Stream Is Closed. Force Write By Closing Stream.
        this.Close(false);
        this.Open(AccessType.Write, false);

        if (verbose)
        {
            Logger.Log($"File [{FullName}] Was Created.", callbackLabel, callback);
        }
        return this;
    }
    catch (Exception e)
    {
        Logger.Log($"ERROR - {e.Message}", callbackLabel, callback);
        throw;
    }
}
/// <summary>
/// Implementation of the ZephyrFile Open method in Amazon S3 Storage.
/// </summary>
/// <param name="access">Specifies to open Stream with "Read" or "Write" access.</param>
/// <param name="verbose">Log details of the open operation.</param>
/// <param name="callbackLabel">Optional "label" to be passed into the callback method.</param>
/// <param name="callback">Optional method that is called for logging purposes.</param>
/// <returns>The open Stream for the AmazonS3ZephyrFile.</returns>
public override System.IO.Stream Open(AccessType access, bool verbose = true, string callbackLabel = null, Action<string, string> callback = null)
{
    // Create the object first when it does not exist yet.
    if (!Exists)
    {
        Create(verbose: false);
        Close(false);
    }

    if (!IsOpen)
    {
        if (_client == null)
        {
            throw new Exception($"AWSClient Not Set.");
        }

        var s3File = new S3FileInfo(_client.Client, BucketName, ObjectKey);
        switch (access)
        {
        case AccessType.Read:
            this.Stream = s3File.OpenRead();
            break;
        case AccessType.Write:
            this.Stream = s3File.OpenWrite();
            break;
        default:
            throw new Exception($"Unknown AccessType [{access}] Received.");
        }
    }
    return this.Stream;
}
/// <summary>
/// Refreshes an existing database record from the live S3 object
/// (modified timestamp, size, scan time) and counts the update.
/// </summary>
private static void UpdateFile(S3FileInfo file, AWSS3Object dbFile)
{
    dbFile.FileSize = file.Length;
    dbFile.FileModified = GetLastModifiedFromS3(file.FullName);
    dbFile.LastScanned = scanTime;
    // Running total of records refreshed during this scan.
    updated++;
}
/// <summary>
/// Writes "level, norm" per geometric wavelet to a file, on S3 (when Form1.UseS3) or locally.
/// </summary>
/// <param name="decisionGeoWaveArr">Wavelets whose level and norm are printed.</param>
/// <param name="filename">Destination path (S3 key or local path).</param>
static public void printLevelWaveletNorm(List<GeoWave> decisionGeoWaveArr, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // BUG FIX: using-block disposes the writer even when a write throws.
    // (Removed dead commented-out dataDim/labelDim code.)
    using (sw)
    {
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            sw.WriteLine(t.level + ", " + t.norm);
        }
    }
}
/// <summary>
/// Wraps a raw S3 file handle in the AWSStorageFile abstraction (properties are copied by the constructor).
/// </summary>
static public AWSStorageFile ToStorageFile(this S3FileInfo fileInfo)
{
    return new AWSStorageFile(fileInfo);
}
/// <summary>
/// Returns the length of an S3-stored file, preferring the "CachedLength" metadata
/// entry when present; falls back to the S3 object length, and to
/// <paramref name="lenghtFallback"/> when any S3 call fails.
/// </summary>
/// <param name="filename">File name relative to the root folder.</param>
/// <param name="lenghtFallback">Value returned when the length cannot be retrieved.</param>
public long FileLength(string filename, long lenghtFallback)
{
    try
    {
        string key = _rootFolderName + filename;
        GetObjectMetadataRequest request = new GetObjectMetadataRequest();
        request.Key = key;
        request.BucketName = _containerName;
        GetObjectMetadataResponse response = _s3Client.GetObjectMetadata(request);

        // Prefer the cached length stashed in object metadata when it parses cleanly.
        string cachedLength = response.Metadata["CachedLength"];
        if (!string.IsNullOrEmpty(cachedLength) && long.TryParse(cachedLength, out var parsedLength))
        {
            return parsedLength;
        }
        return new S3FileInfo(_s3Client, _containerName, key).Length;
    }
    catch (Exception e)
    {
        // Best-effort: log and hand back the caller-supplied fallback length.
        Trace.WriteLine(
            $"ERROR {e.ToString()} Exception thrown while retrieving file length of file (unknown) for {_rootFolderName}");
        return lenghtFallback;
    }
}
/// <summary>
/// Copies a local file's content into an S3 object.
/// </summary>
/// <param name="localFile">Full path of the local source file.</param>
/// <param name="s3File">Destination S3 file.</param>
public void CopyLocalToS3(String localFile, S3FileInfo s3File)
{
    // BUG FIX: both streams were previously never disposed — the local handle leaked
    // and the S3 write stream was never closed, so the object was never committed
    // (S3 objects are only written when their stream is closed).
    using (FileStream local = fs.File.OpenRead(localFile, Alphaleonis.Win32.Filesystem.PathFormat.FullPath))
    using (Stream file = s3File.OpenWrite())
    {
        local.CopyTo(file);
    }
}
/// <summary>
/// Creates an empty, publicly readable object at the given key and returns it
/// wrapped as an S3StorageFile. A far-future Expires header is attached.
/// </summary>
/// <param name="path">Object key to create.</param>
public IStorageFile CreateFile(string path)
{
    Logger.Information("CreateFile");

    PutObjectRequest request = new PutObjectRequest
    {
        BucketName = BucketName,
        Key = path,
        CannedACL = S3CannedACL.PublicRead,
        InputStream = new MemoryStream(),
        Timeout = TimeSpan.FromSeconds(300),
        ReadWriteTimeout = TimeSpan.FromMinutes(5)
    };
    // Far-future expiry plus explicit public-read ACL header.
    request.Headers["Expires"] = DateTime.Now.AddYears(10).ToString("ddd, dd, MMM yyyy hh:mm:ss") + " GMT";
    request.Headers["x-amz-acl"] = "public-read";

    var response = _client.PutObject(request);

    var fileInfo = new S3FileInfo(_client, BucketName, path);
    return new S3StorageFile(fileInfo, this);
}
/// <summary>
/// Opens the specified file for reading or writing.
/// </summary>
/// <param name="path">The path of the file to open.</param>
/// <param name="mode">Specified if the file should be opened, created, overwritten or truncated.</param>
/// <param name="access">Specified if the stream should be opened for reading or writing.</param>
/// <returns>A <see cref="T:System.IO.Stream"/> that can be used to read or write the content of the file.</returns>
/// <exception cref="ArgumentNullException">path is null.</exception>
/// <exception cref="NotSupportedException">The mode/access combination is not supported.</exception>
public Stream OpenFileStream(string path, FileMode mode, FileAccess access)
{
    if (path == null)
    {
        throw new ArgumentNullException(nameof(path));
    }
    path = MapPath(path);
    // Strip the "bucket/" prefix MapPath adds; S3FileInfo wants the bare key.
    path = path.Substring(this.BucketName.Length + 1);

    var info = new S3FileInfo(S3, this.BucketName, path);
    if (mode == FileMode.Open && access == FileAccess.Read)
    {
        return info.OpenRead();
    }
    else if (mode == FileMode.Open && access == FileAccess.Write)
    {
        return info.OpenWrite();
    }
    // BUG FIX: the original condition read `Create || CreateNew && Write`; since &&
    // binds tighter than ||, FileMode.Create matched even with FileAccess.Read.
    // Parenthesized to require Write access for both Create modes.
    else if ((mode == FileMode.Create || mode == FileMode.CreateNew) && access == FileAccess.Write)
    {
        return info.Create();
    }
    throw new NotSupportedException();
}
/// <summary>
/// Reads the stored version id from the secrets data file's S3 metadata.
/// Returns 0 when the file does not exist or the metadata value is absent/unparsable.
/// </summary>
private int LookupVersionId()
{
    var fileKey = $"{_secretsDirectory}/{_dataFileName}";
    using (IAmazonS3 s3Client = CreateS3Client())
    {
        // No file yet means no recorded version.
        var s3FileInfo = new S3FileInfo(s3Client, _bucketName, fileKey);
        if (!s3FileInfo.Exists)
        {
            return 0;
        }

        var response = s3Client.GetObjectMetadata(new GetObjectMetadataRequest()
        {
            BucketName = _bucketName,
            Key = fileKey
        });
        string rawVersion = response.Metadata["x-amz-meta-mversionid"];
        if (rawVersion != null && int.TryParse(rawVersion, out int versionId))
        {
            return versionId;
        }
        return 0;
    }
}
/// <summary>
/// Retrieves a file within the storage provider.
/// </summary>
/// <param name="path">The relative path to the file within the storage provider.</param>
/// <returns>The file, or null when the client is unavailable.</returns>
/// <exception cref="ArgumentException">If the file is not found.</exception>
public IStorageFile GetFile(string path)
{
    EnsureInitialized();
    if (_client == null)
    {
        return null;
    }

    var request = new GetObjectRequest();
    request.BucketName = BucketName;
    request.Key = path;
    request.ResponseExpires = DateTime.Now.AddMinutes(5);

    // NOTE(review): GetObject fetches the whole object just to confirm it exists
    // and read back its key; a metadata-only call would be cheaper — confirm intent.
    using (GetObjectResponse response = _client.GetObject(request))
    {
        var fileInfo = new S3FileInfo(_client, BucketName, response.Key);
        return new S3StorageFile(fileInfo, this);
    }
}
/// <summary>
/// Moves an S3 object to a new key, creating the destination folder when missing
/// and optionally deleting any existing object at the new key first.
/// Makes the moved object public afterwards. No-op when S3 is disabled or the
/// old key is empty.
/// </summary>
/// <param name="oldFileKey">Key of the object to move.</param>
/// <param name="newFileKey">Destination key.</param>
/// <param name="folderName">Destination folder to ensure exists.</param>
/// <param name="overWrite">Delete any existing object at newFileKey before moving.</param>
public static void Move(string oldFileKey, string newFileKey, string folderName, bool overWrite = false)
{
    if (!IsUseS3)
    {
        return;
    }
    if (string.IsNullOrEmpty(oldFileKey))
    {
        return;
    }
    if (overWrite)
    {
        Delete(newFileKey);
    }
    using (IAmazonS3 client = new AmazonS3Client(Region))
    {
        S3FileInfo currentObject = new S3FileInfo(client, Bucket, oldFileKey);
        S3DirectoryInfo destination = new S3DirectoryInfo(client, Bucket, folderName);
        if (!destination.Exists)
        {
            destination.Create();
        }
        // Return value is intentionally discarded (the `movedObject` local was unused).
        currentObject.MoveTo(Bucket, newFileKey);
    }
    MakePublic(newFileKey);
}
/// <summary>
/// Records one S3 file in the database: inserts it when unknown, updates it when
/// modified, or just refreshes its scan timestamp. Flushes a batch every 5000 files.
/// </summary>
/// <param name="file">The S3 file being scanned.</param>
private static void ScanFile(S3FileInfo file)
{
    if (scanned >= 5000)
    {
        SubmitBatch();
    }

    // SqlDateTime clamps the timestamp to SQL Server's supported range.
    DateTime fileModified = new SqlDateTime(GetLastModifiedFromS3(file.FullName)).Value;

    // BUG FIX: the original used Single() and treated ANY exception as "not found",
    // which silently swallowed genuine database errors and used exceptions for
    // control flow. SingleOrDefault + null check makes "missing" an ordinary case.
    AWSS3Object dbFile = db.AWSS3Objects.SingleOrDefault(f => f.FileKey == file.FullName);
    if (dbFile == null)
    {
        InsertFile(file);
    }
    else if (!fileModified.Equals(dbFile.FileModified))
    {
        // File changed since last scan — refresh the stored metadata.
        UpdateFile(file, dbFile);
    }
    else
    {
        dbFile.LastScanned = scanTime;
    }
    scanned++;
}
/// <summary> Builds file info entries for the already-uploaded S3 objects. </summary>
private async Task<S3FileInfo[]> BuildUploadedObjectInfos(S3Object[] s3Objects)
{
    Debug.Log("Build uploaded objects info.");

    // Fetch the hash data for the uploaded objects.
    var hashTable = await GetUploadedObjectHashTable(s3Client, s3Objects);

    // Pair each object key with its hash (default/null hash when no entry matches).
    return s3Objects
        .Select(s3Object => new S3FileInfo()
        {
            ObjectPath = s3Object.Key,
            Hash = hashTable.FirstOrDefault(x => x.Key == s3Object.Key).Value,
        })
        .ToArray();
}
/// <summary>
/// Deletes the object at the given (cleaned) path from the configured bucket.
/// </summary>
public void DeleteFile(string path)
{
    path = CleanPath(path);
    var target = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, path);
    target.Delete();
}
/// <summary>
/// Returns a storage-file wrapper for the object at the given (cleaned) path.
/// </summary>
public IStorageFile GetFile(string path)
{
    path = CleanPath(path);
    var s3File = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, path);
    return new AmazonS3StorageFile(s3File, this);
}
/// <summary>
/// Delete Directory from S3: deletes every file under the directory, then the
/// directory itself.
/// </summary>
/// <param name="bucket">Bucket containing the directory.</param>
/// <param name="uploadDirectory">Directory key to delete.</param>
/// <returns>true when the directory existed and was deleted; false otherwise or on error.</returns>
public bool DeleteAsset(string bucket, string uploadDirectory)
{
    try
    {
        S3DirectoryInfo directoryToDelete = new S3DirectoryInfo(_client, bucket, uploadDirectory);
        var directoryFiles = directoryToDelete.EnumerateFiles();
        foreach (S3FileInfo file in directoryFiles)
        {
            // FullName is "bucket:\key"; strip the bucket prefix to rebuild the bare key.
            S3FileInfo filetoDelete = new S3FileInfo(_client, bucket, file.FullName.Replace(bucket + ":\\", string.Empty));
            if (filetoDelete.Exists)
            {
                filetoDelete.Delete();
            }
        }
        if (directoryToDelete.Exists)
        {
            directoryToDelete.Delete(false);
            return true;
        }
    }
    catch (Exception exception)
    {
        // BUG FIX: exception.InnerException.Message threw NullReferenceException
        // whenever InnerException was null; fall back to the outer message.
        Console.WriteLine(exception.InnerException?.Message ?? exception.Message);
        return false;
    }
    return false;
}
/// <summary>
/// Checks whether an object exists at the given (cleaned) path in the configured bucket.
/// </summary>
public bool FileExists(string path)
{
    path = CleanPath(path);
    var s3File = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, path);
    return s3File.Exists;
}
/// <summary>
/// Ad-hoc manual test: uploads a hard-coded local image to "test\x2.jpg" in the
/// configured bucket and publishes it. Depends on a developer-machine path.
/// </summary>
public void Test()
{
    using (var sourceStream = File.OpenRead(@"E:\al\pics\adventure_time\x.jpg"))
    {
        var a3File = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, @"test\x2.jpg");
        // Create the (empty) object first; content is streamed in afterwards.
        using (a3File.Create())
        {
        }
        using (var targetStream = a3File.OpenWrite())
        {
            sourceStream.CopyTo(targetStream);
        }
        PublishFile(a3File.FullName);
    }
}
static void Main(string[] args)
{
    Console.WriteLine("Hello World!");
    // SECURITY: a real-looking AWS access key ID and secret key are committed in
    // source here. These credentials must be considered leaked — revoke them and
    // load credentials from the environment, a credentials profile, or an IAM role.
    IAmazonS3 client = new AmazonS3Client("AKIA6PYYJMASGIKCXL53", "jK56S8EvgOA9lLNN8R3nNldE1IW2yVPXn9K0rNYH", RegionEndpoint.APSoutheast1);
    FileInfo file = new FileInfo(@"c:\test.txt");
    string destPath = "data/test.txt";
    // NOTE(review): `file` and `s3File` are constructed but nothing is uploaded —
    // presumably an unfinished demo; confirm whether an upload call is missing.
    S3FileInfo s3File = new S3FileInfo(client, "my-bucket-name", destPath);
}
/// <summary>
/// Opens a read stream over the object at the given (cleaned) path.
/// </summary>
public Stream GetObjectStream(string path)
{
    path = CleanPath(path);
    var s3File = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, path);
    return s3File.OpenRead();
}
public void CopyFile(string originalPath, string duplicatePath)
{
    originalPath = CleanPath(originalPath);
    duplicatePath = CleanPath(duplicatePath);
    var file = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, originalPath);
    // NOTE(review): CopyToLocal copies the S3 object to the LOCAL filesystem, yet
    // duplicatePath was run through CleanPath like a bucket key. If the intent is a
    // bucket-internal duplicate, this should probably be file.CopyTo(...) — confirm
    // against callers before changing.
    file.CopyToLocal(duplicatePath);
}
public void RenameFile(string oldPath, string newPath)
{
    oldPath = CleanPath(oldPath);
    newPath = CleanPath(newPath);
    var file = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, oldPath);
    // NOTE(review): MoveToLocal moves the S3 object to the LOCAL filesystem (copy +
    // delete of the source), yet newPath was cleaned like a bucket key. A rename
    // within the bucket would normally be file.MoveTo(...) — confirm intended
    // behavior against callers before changing.
    file.MoveToLocal(newPath);
}
/// <summary>
/// Creates an empty object at the given (cleaned) path, publishes it, and
/// returns it wrapped as a storage file.
/// </summary>
public IStorageFile CreateFile(string path)
{
    path = CleanPath(path);
    var s3File = new S3FileInfo(_client, _amazonS3StorageConfiguration.AWSFileBucket, path);
    // Create and immediately close the stream so the (empty) object is committed.
    using (s3File.Create())
    {
    }
    PublishFile(path);
    return new AmazonS3StorageFile(s3File, this);
}