/// <summary>
/// Uploads <c>job.fileCount</c> randomly generated files of <c>job.fileSizeInBytes</c>
/// bytes each to the cloud named by <c>job.cloud</c>, timing every upload and queueing
/// one <see cref="TimedEvent"/> per file onto <c>timedEvents</c>. Prints the average and
/// 99th-percentile upload latency when finished. Does nothing if <c>job.cloud</c> is not
/// present in <c>_cloudConfig</c>.
/// </summary>
/// <param name="job">Job describing the target cloud, file count/size, name prefix and thread count.</param>
private static void Upload(Job job)
{
    Cloud cloud;
    if (!_cloudConfig.TryGetValue(job.cloud, out cloud))
    {
        return;
    }

    List<double> results = new List<double>();

    // The three cloud types differ only in how the storage manager is built and which
    // UploadFileAsync is called; the timing loop itself is shared via RunTimedUploads.
    switch (cloud.type)
    {
        case "s3":
            try
            {
                s3Manager s3 = new s3Manager(cloud.awsAccessKey, cloud.awsAccessKeySecret, cloud.awsServiceUrl, cloud.awsRegion, cloud.awsS3bucket);
                RunTimedUploads(job, cloud, results, (bytes, name) => s3.UploadFileAsync(bytes, name));
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;

        case "gcs":
            try
            {
                // cloud.credential holds the base64-encoded service-account JSON.
                gcsManager gcs = new gcsManager(Encoding.UTF8.GetString(Convert.FromBase64String(cloud.credential)), cloud.gcsbucket, cloud.projectid);
                RunTimedUploads(job, cloud, results, (bytes, name) => gcs.UploadFileAsync(bytes, name));
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;

        case "blob":
            try
            {
                blobManager blobClient = new blobManager(cloud.blobStorageAccountConnectionString, cloud.blobContainer);
                RunTimedUploads(job, cloud, results, (bytes, name) => blobClient.UploadFileAsync(bytes, name));
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;

        default:
            Console.WriteLine("Invalid cloud type specified");
            break;
    }

    Console.WriteLine("Average latency: {0}, 99th percentile: {1}", Average(results), Percentile(results, 0.99));
}

/// <summary>
/// Shared parallel upload loop: for each of <c>job.fileCount</c> files, generates a
/// random payload, uploads it through <paramref name="uploadAsync"/>, and queues a
/// <see cref="TimedEvent"/> carrying source/cloud metadata plus the measured upload time.
/// Per-file failures are logged and do not stop the remaining uploads.
/// </summary>
/// <param name="job">Job describing file count, size, prefix and degree of parallelism.</param>
/// <param name="cloud">Target cloud config, used only for metadata on the timed event.</param>
/// <param name="results">Shared latency accumulator; Adds are serialized via a lock.</param>
/// <param name="uploadAsync">Uploads (payload, fileName) to the target store.</param>
private static void RunTimedUploads(Job job, Cloud cloud, List<double> results, Func<byte[], string, Task> uploadAsync)
{
    Parallel.For(0, job.fileCount, new ParallelOptions { MaxDegreeOfParallelism = job.threads }, (i, state) =>
    {
        try
        {
            TimedEvent timer = new TimedEvent();
            timer.sourceCity = _ip.city;
            timer.sourceCountry = _ip.country;
            timer.sourceIp = _ip.ip;
            timer.sourceLoc = _ip.loc;
            timer.sourceOrg = _ip.org;
            timer.sourcePostal = _ip.postal;
            timer.sourceRegion = _ip.region;
            timer.cloudName = cloud.name;
            timer.cloudType = cloud.type;
            timer.eventType = job.type;
            timer.fileName = job.filePrefix + i;
            timer.startTime = DateTime.UtcNow;

            byte[] bytes = fileManager.generateRandomBytes(job.fileSizeInBytes);

            Stopwatch stopwatch = Stopwatch.StartNew();
            // Blocking on the async upload is deliberate here: Parallel.For supplies the
            // concurrency and job.threads bounds the number of in-flight uploads.
            uploadAsync(bytes, timer.fileName).GetAwaiter().GetResult();
            stopwatch.Stop();
            // NOTE(review): the original also called GC.Collect() after every file, which
            // stalls all worker threads mid-benchmark; the payload is reclaimed naturally.

            timer.elapsedMiliseconds = stopwatch.ElapsedMilliseconds;
            timer.finishTime = DateTime.UtcNow;
            timer.fileSizeInBytes = job.fileSizeInBytes;
            timedEvents.Enqueue(timer);

            // List<T> is not thread-safe; the original unsynchronized Add raced across
            // the parallel lambdas and could corrupt the list or drop samples.
            lock (results)
            {
                results.Add(timer.elapsedMiliseconds);
            }

            // Success message belongs inside the try: the original printed "uploaded"
            // even after the catch had just swallowed an upload failure.
            Console.WriteLine("{0}{1} uploaded to {2}", job.filePrefix, i, cloud.name);
        }
        catch (Exception ex)
        {
            Console.WriteLine(ex.Message);
        }
    });
}
/// <summary>
/// Downloads <c>job.downloadCount</c> files, sampled uniformly at random (with
/// replacement) from the files previously uploaded under <c>job.filePrefix</c>, via
/// pre-signed/SAS URLs. Each request is timed and queued as a <see cref="TimedEvent"/>
/// onto <c>timedEvents</c>; prints average and 99th-percentile latency when done.
/// No-op when <c>job.downloadCount</c> is 0 or the cloud name is unknown.
/// </summary>
/// <param name="job">Job describing the target cloud, download count, prefix and thread count.</param>
private static void Download(Job job)
{
    if (job.downloadCount == 0)
    {
        return;
    }

    Cloud cloud;
    if (!_cloudConfig.TryGetValue(job.cloud, out cloud))
    {
        return;
    }

    List<string> urlsToDownload = BuildDownloadUrls(job, cloud);

    List<double> results = new List<double>();
    int downloadCount = 0;

    Parallel.ForEach(urlsToDownload, new ParallelOptions { MaxDegreeOfParallelism = job.threads }, url =>
    {
        try
        {
            TimedEvent timer = new TimedEvent();
            timer.sourceCity = _ip.city;
            timer.sourceCountry = _ip.country;
            timer.sourceIp = _ip.ip;
            timer.sourceLoc = _ip.loc;
            timer.sourceOrg = _ip.org;
            timer.sourcePostal = _ip.postal;
            timer.sourceRegion = _ip.region;
            timer.cloudName = cloud.name;
            timer.cloudType = cloud.type;
            timer.eventType = job.type;
            timer.startTime = DateTime.UtcNow;
            timer.url = url;

            Stopwatch stopwatch = Stopwatch.StartNew();
            // Blocking on the async call is deliberate: Parallel.ForEach supplies the
            // concurrency. The response is disposed (the original leaked it).
            using (var response = httpClient.GetAsync(url).GetAwaiter().GetResult())
            {
                // Timing covers the request/headers only; the body is read afterwards,
                // matching the original measurement.
                stopwatch.Stop();
                timer.elapsedMiliseconds = stopwatch.ElapsedMilliseconds;
                timer.finishTime = DateTime.UtcNow;

                // downloadCount++ from parallel lambdas is non-atomic; Interlocked fixes it.
                int sequence = Interlocked.Increment(ref downloadCount);

                var content = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult();
                timer.fileSizeInBytes = content.Length;
                timer.httpStatusCode = (int)response.StatusCode;
                timedEvents.Enqueue(timer);

                // List<T> is not thread-safe; serialize the shared Add.
                lock (results)
                {
                    results.Add(timer.elapsedMiliseconds);
                }

                Console.WriteLine("{0}: HTTP {1} - Downloaded {2} bytes from {3} in {4} ms",
                    sequence, timer.httpStatusCode, timer.fileSizeInBytes, timer.cloudName, timer.elapsedMiliseconds);
            }
        }
        catch (Exception ex)
        {
            // Include the reason; the original caught ex but never used it.
            Console.WriteLine("Error downloading: {0} ({1})", url, ex.Message);
        }
    });

    Console.WriteLine("Average latency: {0}, 99th percentile: {1}", Average(results), Percentile(results, 0.99));
}

/// <summary>
/// Builds <c>job.downloadCount</c> pre-signed/SAS download URLs by sampling at random
/// from the files already stored under <c>job.filePrefix</c>. Runs sequentially: URL
/// generation is cheap local work, and the original parallel version raced on the
/// shared list and created a thread-unsafe <c>new Random()</c> per iteration.
/// Returns an empty list for unknown cloud types, empty file listings, or errors.
/// </summary>
/// <param name="job">Job describing the cloud, download count and file prefix.</param>
/// <param name="cloud">Resolved cloud configuration.</param>
private static List<string> BuildDownloadUrls(Job job, Cloud cloud)
{
    List<string> urls = new List<string>();
    Random rnd = new Random();
    try
    {
        switch (cloud.type)
        {
            case "s3":
            {
                s3Manager s3 = new s3Manager(cloud.awsAccessKey, cloud.awsAccessKeySecret, cloud.awsServiceUrl, cloud.awsRegion, cloud.awsS3bucket);
                List<string> files = s3.GetFileListAsync(job.filePrefix).GetAwaiter().GetResult();
                // files.Count guard: rnd.Next(0) returns 0 and indexing an empty list throws.
                for (int i = 0; i < job.downloadCount && files.Count > 0; i++)
                {
                    urls.Add(s3.GeneratePreSignedURL(10, files[rnd.Next(files.Count)]));
                }
                break;
            }
            case "blob":
            {
                blobManager blobClient = new blobManager(cloud.blobStorageAccountConnectionString, cloud.blobContainer);
                List<string> files = blobClient.GetFileListAsync(job.filePrefix).GetAwaiter().GetResult();
                // One read-only container SAS is shared by every generated URL.
                string containerSas = blobClient.GetContainerSASRead(20);
                for (int i = 0; i < job.downloadCount && files.Count > 0; i++)
                {
                    urls.Add(string.Format("{0}{1}", blobClient.GetBlobURL(files[rnd.Next(files.Count)]), containerSas));
                }
                break;
            }
            case "gcs":
            {
                // cloud.credential holds the base64-encoded service-account JSON.
                gcsManager gcsClient = new gcsManager(Encoding.UTF8.GetString(Convert.FromBase64String(cloud.credential)), cloud.gcsbucket, cloud.projectid);
                List<string> files = gcsClient.GetFileList(job.filePrefix);
                for (int i = 0; i < job.downloadCount && files.Count > 0; i++)
                {
                    urls.Add(gcsClient.GeneratePreSignedURL(10, files[rnd.Next(files.Count)]));
                }
                break;
            }
            default:
                Console.WriteLine("Invalid cloud type specified");
                break;
        }
    }
    catch (Exception ex)
    {
        // The original gcs branch had no try/catch at all, letting listing/signing
        // failures escape Download(); all branches are now covered uniformly.
        Console.WriteLine("Error in downloading files: {0}", ex.Message);
    }
    return urls;
}