/// <summary>
/// Runs one upload job against the storage target named by <paramref name="job"/>:
/// generates <c>fileCount</c> random payloads of <c>fileSizeInBytes</c>, uploads them in
/// parallel (bounded by <c>job.threads</c>), and records a TimedEvent per file plus
/// aggregate latency stats. Unknown cloud names are silently ignored (original behavior).
/// </summary>
/// <param name="job">Job definition: cloud name, file count/size, prefix, thread count.</param>
private static void Upload(Job job)
{
    Cloud cloud;
    if (!_cloudConfig.TryGetValue(job.cloud, out cloud))
    {
        return; // no matching cloud config — same no-op as the original nested if
    }

    List<double> results = new List<double>();
    // List<T> is NOT thread-safe; concurrent Add from Parallel.For could corrupt it
    // or drop samples, so all writes are serialized through this gate.
    object resultsGate = new object();

    // Shared per-file generate/time/record loop. Only the actual transfer differs per
    // provider, so it is injected as a delegate: (payload, remoteName) -> Task.
    Action<Func<byte[], string, Task>> runUploads = uploadAsync =>
    {
        Parallel.For(0, job.fileCount, new ParallelOptions { MaxDegreeOfParallelism = job.threads }, (i, state) =>
        {
            try
            {
                TimedEvent timer = new TimedEvent();
                timer.sourceCity = _ip.city;
                timer.sourceCountry = _ip.country;
                timer.sourceIp = _ip.ip;
                timer.sourceLoc = _ip.loc;
                timer.sourceOrg = _ip.org;
                timer.sourcePostal = _ip.postal;
                timer.sourceRegion = _ip.region;
                timer.cloudName = cloud.name;
                timer.cloudType = cloud.type;
                timer.eventType = job.type;
                timer.fileName = job.filePrefix + i;
                timer.startTime = DateTime.UtcNow;

                byte[] bytes = fileManager.generateRandomBytes(job.fileSizeInBytes);

                // Time only the transfer itself, not payload generation.
                Stopwatch stopwatch = Stopwatch.StartNew();
                uploadAsync(bytes, job.filePrefix + i).GetAwaiter().GetResult();
                stopwatch.Stop();
                // NOTE: the original forced GC.Collect() here every iteration; removed —
                // 'bytes' goes out of scope naturally and forced collections in a hot
                // loop only add pauses. Timing is unaffected (it ran after Stop()).

                timer.elapsedMiliseconds = stopwatch.ElapsedMilliseconds;
                timer.finishTime = DateTime.UtcNow;
                timer.fileSizeInBytes = job.fileSizeInBytes;
                timedEvents.Enqueue(timer); // ConcurrentQueue — safe without the gate

                lock (resultsGate)
                {
                    results.Add(timer.elapsedMiliseconds);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
            Console.WriteLine("{0}{1} uploaded to {2}", job.filePrefix, i, cloud.name);
        });
    };

    switch (cloud.type)
    {
        case "s3":
            try
            {
                s3Manager s3 = new s3Manager(cloud.awsAccessKey, cloud.awsAccessKeySecret, cloud.awsServiceUrl, cloud.awsRegion, cloud.awsS3bucket);
                runUploads(s3.UploadFileAsync);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;
        case "gcs":
            try
            {
                // GCS credential is stored base64-encoded in config; decode to the raw JSON key.
                gcsManager gcs = new gcsManager(Encoding.UTF8.GetString(Convert.FromBase64String(cloud.credential)), cloud.gcsbucket, cloud.projectid);
                runUploads(gcs.UploadFileAsync);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;
        case "blob":
            try
            {
                blobManager blobClient = new blobManager(cloud.blobStorageAccountConnectionString, cloud.blobContainer);
                runUploads(blobClient.UploadFileAsync);
            }
            catch (Exception ex)
            {
                Console.WriteLine("Error in uploading files: {0}", ex.Message);
            }
            break;
        default:
            Console.WriteLine("Invalid cloud type specified");
            break;
    }

    // NOTE(review): if every upload failed, 'results' is empty — Average/Percentile are
    // defined elsewhere; confirm they tolerate an empty list.
    Console.WriteLine("Average latency: {0}, 99th percentile: {1}", Average(results), Percentile(results, 0.99));
}
/// <summary>
/// Entry point: loads job.json, registers the configured storage targets, looks up the
/// client's public IP (for tagging results), runs each job sequentially, then drains the
/// timing queue and uploads the results as CSV and JSON to the configured output blob target.
/// </summary>
static void Main(string[] args)
{
    // Load JSON file with configuration and job definitions.
    string configJson = File.ReadAllText("job.json");
    JobManager manager = new JobManager(configJson);
    foreach (var cloud in manager.job.config.cloud)
    {
        if (!_cloudConfig.ContainsKey(cloud.name))
        {
            _cloudConfig.Add(cloud.name, cloud);
        }
    }

    // Determine client IP address via ipinfo.io for tagging the source of test data.
    // GetAwaiter().GetResult() (not .Result) so failures surface as the original
    // exception instead of being wrapped in an AggregateException.
    var response = httpClient.GetAsync("http://ipinfo.io/json").GetAwaiter().GetResult();
    _ip = JsonConvert.DeserializeObject<IPInfo>(response.Content.ReadAsStringAsync().GetAwaiter().GetResult());

    Console.WriteLine("Job file contains {0} jobs and {1} storage targets", manager.job.jobs.Length, manager.job.config.cloud.Length);

    // Process each job sequentially.
    foreach (var job in manager.job.jobs)
    {
        Console.WriteLine("****JOB STARTED****\nJob Name: {0}\nType: {1}\nCloud: {2}\n\n", job.name, job.type, job.cloud);
        switch (job.type)
        {
            case "download":
                Download(job);
                break;
            case "upload":
                Upload(job);
                break;
            default:
                Console.WriteLine("Not a valid job type");
                break;
        }
        Console.WriteLine("****JOB FINISHED****\n");
    }

    // Drain the timing queue into a list for serialization. TryDequeue alone is the
    // idiomatic drain; the extra IsEmpty check the original did is redundant.
    List<TimedEvent> events = new List<TimedEvent>();
    TimedEvent timed;
    while (timedEvents.TryDequeue(out timed))
    {
        events.Add(timed);
    }

    Cloud outputCloud;
    if (_cloudConfig.TryGetValue(manager.job.config.output, out outputCloud))
    {
        using (var memoryStream = new MemoryStream())
        {
            using (var streamWriter = new StreamWriter(memoryStream))
            using (var csvWriter = new CsvWriter(streamWriter))
            {
                csvWriter.WriteRecords(events);
            }
            // StreamWriter flushed (and MemoryStream closed) here; MemoryStream.ToArray()
            // is documented to remain valid on a closed stream.

            string output = JsonConvert.SerializeObject(events);

            // Capture ONE timestamp so the CSV and JSON share the same base name even if
            // the two uploads straddle a second boundary (the original evaluated UtcNow twice).
            string timestamp = DateTime.UtcNow.ToString("yyyyMMddHHmmss");

            // Write CSV and JSON to the blob target's "results" container.
            blobManager blobClient = new blobManager(outputCloud.blobStorageAccountConnectionString, "results");
            blobClient.UploadFileAsync(memoryStream.ToArray(), string.Format("{0}.csv", timestamp)).GetAwaiter().GetResult();
            blobClient.UploadFileAsync(Encoding.UTF8.GetBytes(output), string.Format("{0}.json", timestamp)).GetAwaiter().GetResult();

            Console.WriteLine("Wrote {0} timed events to {1}.csv and {1}.json and uploaded to {2}", events.Count, timestamp, outputCloud.name);
        }
    }
    Console.ReadLine();
}