/// <summary>
/// Saves the incoming form file to disk, uploads it to the OSS bucket
/// (chunked when it exceeds the chunk-size threshold), removes the temporary
/// file, and returns the upload result together with the access token used.
/// </summary>
/// <param name="file">Upload request carrying the bucket key and the form file.</param>
/// <param name="rootPath">Root directory under which the temporary file is saved.</param>
/// <returns>Anonymous object: <c>uploadObject</c> (OSS response) and <c>token</c>.</returns>
public async Task<dynamic> UploadObjectTask(BucketUploadFile file, string rootPath)
{
    string fileSavePath = await CreateAndSaveFile(file, rootPath);

    IObjectsApi objects = GeneralTokenConfigurationSettings<IObjectsApi>
        .SetToken(new ObjectsApi(), await _authServiceAdapter.GetSecondaryTokenTask());

    long fileSize = file.fileToUpload.Length;
    const int UPLOAD_CHUNCK_SIZE = 2; // chunk-size threshold in MB
    string bucketKey = file.bucketKey;
    string fileName = Path.GetFileName(file.fileToUpload.FileName);

    try
    {
        dynamic uploadObj = await UploadObjByStatment(fileSize, UPLOAD_CHUNCK_SIZE, objects, bucketKey, fileName, fileSavePath);
        return new { uploadObject = uploadObj, token = objects.Configuration.AccessToken };
    }
    finally
    {
        // Always delete the temporary file, even when the upload throws —
        // the original skipped cleanup on failure and leaked the file.
        File.Delete(fileSavePath);
    }
}
/// <summary>
/// Lists all objects stored in the given OSS bucket.
/// </summary>
/// <param name="bucketKey">Key of the bucket whose objects are listed.</param>
/// <returns>The OSS object-listing response.</returns>
public async Task<dynamic> GetObjects(string bucketKey)
{
    string token = await _authServiceAdapter.GetSecondaryTokenTask();
    IObjectsApi api = GeneralTokenConfigurationSettings<IObjectsApi>.SetToken(new ObjectsApi(), token);
    return await api.GetObjectsAsync(bucketKey);
}
/// <summary>
/// Generate signed URL for the OSS object.
/// </summary>
/// <param name="api">Objects API already configured with a token.</param>
/// <param name="bucketKey">Bucket containing the object.</param>
/// <param name="objectName">Key of the object to sign.</param>
/// <param name="access">Access level encoded into the signed resource.</param>
/// <param name="minutesExpiration">Lifetime of the signed URL in minutes.</param>
/// <returns>The signed URL string.</returns>
private static async Task<string> GetSignedUrl(IObjectsApi api, string bucketKey, string objectName, ObjectAccess access = ObjectAccess.Read, int minutesExpiration = 30)
{
    PostBucketsSigned signature = new PostBucketsSigned(minutesExpiration);
    dynamic signedResource = await api.CreateSignedResourceAsync(bucketKey, objectName, signature, AsString(access));
    return signedResource.signedUrl;
}
/// <summary>
/// Uploads a file to OSS in a single request (used for files at or below the
/// chunk-size threshold).
/// </summary>
/// <param name="objects">Objects API already configured with a token.</param>
/// <param name="path">Local path of the file to upload.</param>
/// <param name="bucketKey">Destination bucket.</param>
/// <returns>The OSS upload response.</returns>
public static async Task<dynamic> UploadLessChunkSizeObject(this IObjectsApi objects, string path, string bucketKey)
{
    // Open a raw FileStream: StreamReader is a text decoder and the wrong wrapper
    // for a binary upload (the original only ever used its BaseStream anyway).
    using (FileStream stream = File.OpenRead(path))
    {
        // Local result instead of the shared static `uploadObj` field:
        // concurrent uploads must not race on one another's responses.
        // NOTE(review): the (int) cast caps supported sizes at 2 GB — acceptable
        // here since this path only handles files below the chunk threshold.
        dynamic result = await objects.UploadObjectAsync(bucketKey, Path.GetFileName(path), (int)stream.Length, stream, contentDisposition);
        return result;
    }
}
/// <summary>
/// Lists every object in the given bucket and appends one tree node per object
/// to the <c>nodes</c> collection.
/// </summary>
/// <param name="id">Bucket key whose objects are listed.</param>
private async Task AddObjectsToNode(string id)
{
    IObjectsApi objectsApi = GeneralTokenConfigurationSettings<IObjectsApi>
        .SetToken(new ObjectsApi(), await _authServiceAdapter.GetSecondaryTokenTask());

    // Use the async overload: the original called the synchronous GetObjects,
    // blocking the thread inside an async method.
    dynamic objectList = await objectsApi.GetObjectsAsync(id);

    foreach (KeyValuePair<string, dynamic> objInfo in new DynamicDictionaryItems(objectList.items))
    {
        // Node id is the base64-encoded OSS objectId; "object" marks a leaf node.
        nodes.Add(new TreeNode(
            Base64Encoding.Encode((string)objInfo.Value.objectId),
            objInfo.Value.objectKey,
            "object",
            false));
    }
}
/// <summary>
/// Uploads a large file to OSS via the resumable (chunked) upload endpoint,
/// sending one Content-Range request per chunk under a single session id.
/// </summary>
/// <param name="objects">Objects API already configured with a token.</param>
/// <param name="fileSize">Total size of the file in bytes.</param>
/// <param name="bucketKey">Destination bucket.</param>
/// <param name="fileName">Name used to derive the object key.</param>
/// <param name="filePath">Local path of the file to read.</param>
/// <param name="uploadChunkSize">Chunk size in MB (default 2).</param>
/// <returns>The response of the last uploaded chunk.</returns>
public static async Task<dynamic> UploadMoreThanChunkSizeObject(this IObjectsApi objects, long fileSize, string bucketKey, string fileName, string filePath, int uploadChunkSize = 2)
{
    long chunkSize = uploadChunkSize * 1024 * 1024;
    // NOTE(review): fileSize / chunkSize is already integer division, so Math.Round
    // is a no-op here; "+ 1" accounts for the final partial chunk. This looks like it
    // yields one extra iteration when fileSize is an exact multiple of chunkSize — confirm.
    long numbersOfChunk = (long)Math.Round((double)(fileSize / chunkSize)) + 1;
    long start = 0;
    // A file that fits in one chunk is sent as a single range covering the whole file.
    chunkSize = (numbersOfChunk > 1 ? chunkSize : fileSize);
    long end = chunkSize;
    // One session id groups every chunk of this resumable upload.
    string sessionId = Guid.NewGuid().ToString();
    string objectKey = Path.GetFileName(fileName);
    using (BinaryReader reader = new BinaryReader(new FileStream(filePath, FileMode.Open)))
    {
        for (int i = 0; i < numbersOfChunk; i++)
        {
            // Content-Range bounds are inclusive, hence chunkSize + 1 bytes per range below.
            string range = string.Format("bytes {0}-{1}/{2}", start, end, fileSize);
            long numberOfBytes = chunkSize + 1;
            byte[] fileBytes = new byte[numberOfBytes];
            using (MemoryStream memoryStream = new MemoryStream(fileBytes))
            {
                // Seek to the chunk's start and stage its bytes in a MemoryStream for the SDK.
                reader.BaseStream.Seek((int)start, SeekOrigin.Begin);
                int count = reader.Read(fileBytes, 0, (int)numberOfBytes);
                await memoryStream.WriteAsync(fileBytes, 0, (int)numberOfBytes);
                memoryStream.Position = 0;
                // NOTE(review): uploadObj and contentDisposition are shared fields declared
                // elsewhere in this static class; concurrent uploads would race on
                // uploadObj — confirm and consider a local variable.
                uploadObj = await objects.UploadChunkAsync(bucketKey, objectKey, (int)numberOfBytes, range, sessionId, memoryStream, contentDisposition);
                // Advance past the inclusive end, shrinking the final chunk so the last
                // range does not run past the end of the file.
                start = end + 1;
                chunkSize = ((start + chunkSize > fileSize) ? fileSize - start - 1 : chunkSize);
                end = start + chunkSize;
            }
        }
    }
    return(uploadObj);
}
/// <summary>
/// Dispatches to the single-request or chunked upload path depending on
/// whether the file exceeds the chunk-size threshold.
/// </summary>
/// <param name="fileSize">Size of the file in bytes.</param>
/// <param name="UPLOAD_CHUNCK_SIZE">Chunk-size threshold in MB.</param>
/// <param name="objects">Objects API already configured with a token.</param>
/// <param name="bucketKey">Destination bucket.</param>
/// <param name="fileName">Name used to derive the object key.</param>
/// <param name="fileSavePath">Local path of the saved file to upload.</param>
/// <returns>The OSS upload response from whichever path was taken.</returns>
private async Task<dynamic> UploadObjByStatment(long fileSize, int UPLOAD_CHUNCK_SIZE, IObjectsApi objects, string bucketKey, string fileName, string fileSavePath)
{
    int thresholdBytes = UPLOAD_CHUNCK_SIZE * 1024 * 1024;

    if (fileSize <= thresholdBytes)
    {
        // Small file: one-shot upload.
        return await objects.UploadLessChunkSizeObject(fileSavePath, bucketKey);
    }

    // Large file: resumable, chunked upload.
    return await objects.UploadMoreThanChunkSizeObject(fileSize, bucketKey, fileName, fileSavePath, UPLOAD_CHUNCK_SIZE);
}