/// <summary>
/// Deserializes the job list from <see cref="MDPath"/> and writes it to the
/// console as indented JSON.
/// </summary>
protected override void ProcessRecord()
{
    List<Job> joblist = JobSerializer.Deserialize(MDPath);

    // Save and restore the shared indentation flag: the original left it
    // permanently enabled, leaking state into later cmdlet invocations
    // (the JSON/XML/MD variant of this cmdlet resets it after use).
    bool previousIndented = Item.JsonIndented;
    Item.JsonIndented = true;

    try
    {
        DataSerializer.Serialize<List<Job>>(joblist, Console.Out, DataType.Json);
    }
    finally
    {
        Item.JsonIndented = previousIndented;
    }
}
/// <summary>
/// Ensures that stopping and disposing a started worker shuts down its loop threads.
/// </summary>
public void WorkerDispose()
{
    // Arrange: one queued record wrapping a test job, and "no signal" state
    // so the worker runs normally.
    IJob job = new TestJob() { Id = Guid.NewGuid() };
    SignalsRecord signals = new SignalsRecord() { QueueNames = "*", WorkerSignal = WorkerSignal.None, WorkingSignal = WorkingSignal.None };
    QueueRecord queued = new QueueRecord()
    {
        Id = 12,
        ApplicationName = BlueCollarSection.Section.ApplicationName,
        Data = JsonConvert.SerializeObject(job),
        JobName = job.Name,
        JobType = JobSerializer.GetTypeName(job.GetType()),
        QueuedOn = DateTime.UtcNow,
        QueueName = "*",
        TryNumber = 1
    };

    // Mock the repository so the worker dequeues our record and never sees a stop signal.
    var transaction = new Mock<IDbTransaction>();
    var repository = new Mock<IRepository>();
    repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object);
    repository.Setup(r => r.BeginTransaction(It.IsAny<IsolationLevel>())).Returns(transaction.Object);
    repository.Setup(r => r.GetQueued(It.IsAny<string>(), It.IsAny<QueueNameFilters>(), It.IsAny<DateTime>(), It.IsAny<IDbTransaction>())).Returns(queued);
    repository.Setup(r => r.GetWorkingSignals(It.IsAny<long>(), It.IsAny<long?>(), It.IsAny<IDbTransaction>())).Returns(signals);

    var factory = new Mock<IRepositoryFactory>();
    factory.Setup(f => f.Create()).Returns(repository.Object);

    var logger = new Mock<ILogger>();
    Worker worker = null;

    try
    {
        worker = new Worker(BlueCollarSection.Section.ApplicationName, 1, "Test Worker", null, 1, false, factory.Object, logger.Object);
        worker.Start();

        // Give the worker's loop threads time to spin up before stopping.
        Thread.Sleep(1500);

        worker.Stop(false);
        worker.Dispose();

        // After Dispose() no loop thread should remain alive.
        Assert.IsFalse(worker.LoopThreadsAreAlive);
        worker = null;
    }
    finally
    {
        // Guarantee disposal if an assertion or exception fired mid-test.
        if (worker != null)
        {
            worker.Dispose();
        }
    }
}
/// <summary>
/// Ensures that a failing job with retries remaining is recorded as failed
/// and re-queued with an incremented try number.
/// </summary>
public void WorkerExecuteRetry()
{
    // Arrange: a job that always throws and allows exactly one retry.
    IJob job = new TestJob() { ThrowException = true, Retries = 1 };
    SignalsRecord signals = new SignalsRecord() { QueueNames = "*", WorkerSignal = WorkerSignal.None, WorkingSignal = WorkingSignal.None };
    QueueRecord queued = new QueueRecord()
    {
        Id = 12,
        ApplicationName = BlueCollarSection.Section.ApplicationName,
        Data = JsonConvert.SerializeObject(job),
        JobName = job.Name,
        JobType = JobSerializer.GetTypeName(job.GetType()),
        QueuedOn = DateTime.UtcNow,
        QueueName = "*",
        TryNumber = 1
    };

    WorkingRecord working = Worker.CreateWorking(queued, 1, null, DateTime.UtcNow);
    working.Id = 13;

    // Mock the repository so the worker dequeues the record and marks it working.
    var transaction = new Mock<IDbTransaction>();
    var repository = new Mock<IRepository>();
    repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object);
    repository.Setup(r => r.BeginTransaction(It.IsAny<IsolationLevel>())).Returns(transaction.Object);
    repository.Setup(r => r.CreateWorking(It.IsAny<WorkingRecord>(), It.IsAny<IDbTransaction>())).Returns(working);
    repository.Setup(r => r.GetQueued(It.IsAny<string>(), It.IsAny<QueueNameFilters>(), It.IsAny<DateTime>(), It.IsAny<IDbTransaction>())).Returns(queued);
    repository.Setup(r => r.GetWorkingSignals(It.IsAny<long>(), It.IsAny<long?>(), It.IsAny<IDbTransaction>())).Returns(signals);

    var factory = new Mock<IRepositoryFactory>();
    factory.Setup(f => f.Create()).Returns(repository.Object);

    var logger = new Mock<ILogger>();

    using (Worker worker = new Worker(BlueCollarSection.Section.ApplicationName, 1, "Test Worker", null, 1, false, factory.Object, logger.Object))
    {
        worker.Start();

        // Let the worker pick up and execute (and fail) the job.
        Thread.Sleep(1500);

        Assert.AreEqual(WorkerStatus.Working, worker.Status);
    }

    // The failure is recorded and the job re-queued for try number 2.
    repository.Verify(r => r.CreateHistory(It.Is<HistoryRecord>(h => h.Status == HistoryStatus.Failed), It.IsAny<IDbTransaction>()));
    repository.Verify(r => r.CreateQueued(It.Is<QueueRecord>(q => q.TryNumber == 2), It.IsAny<IDbTransaction>()));
}
/// <summary>
/// Verifies that a job with known property values serializes to the
/// exact expected JSON payload.
/// </summary>
public void SerializationSerialize()
{
    // Arrange: fixed values for every serialized property.
    var sourceJob = new TestSerializationJob()
    {
        A = new Guid("1854ef1b-3937-476a-8b32-56436a7b6feb").ToString(),
        B = "Hello, world!",
        C = new DateTime(1982, 5, 28).ToUniversalTime()
    };

    const string ExpectedJson = @"{""A"":""1854ef1b-3937-476a-8b32-56436a7b6feb"",""B"":""Hello, world!"",""C"":""1982-05-28T07:00:00Z""}";

    // Act + assert: the serialized payload matches byte-for-byte.
    string serialized = JobSerializer.Serialize(sourceJob);
    Assert.AreEqual(ExpectedJson, serialized);
}
/// <summary>
/// Verifies that a JSON payload round-trips back into a typed job instance,
/// and that null data still yields a (default-valued) instance.
/// </summary>
public void SerializationDeserialize()
{
    string typeName = JobSerializer.GetTypeName(typeof(TestSerializationJob));
    const string Payload = @"{""A"":""1854ef1b-3937-476a-8b32-56436a7b6feb"",""B"":""Hello, world!"",""C"":""1982-05-28T07:00:00Z""}";

    // Act: deserialize by type name.
    var deserialized = JobSerializer.Deserialize(typeName, Payload) as TestSerializationJob;

    // Assert: every property round-trips.
    Assert.IsNotNull(deserialized);
    Assert.AreEqual("1854ef1b-3937-476a-8b32-56436a7b6feb", deserialized.A);
    Assert.AreEqual("Hello, world!", deserialized.B);
    Assert.AreEqual(new DateTime(1982, 5, 28).ToUniversalTime(), deserialized.C);

    // Null data should still produce an instance rather than null.
    Assert.IsNotNull(JobSerializer.Deserialize(typeName, null));
}
/// <summary>
/// Serializes the given job and inserts it into the MongoDB-backed queue collection.
/// </summary>
/// <param name="job">The job to enqueue.</param>
public void Push(IJob<IJobData> job)
{
    BsonDocument document = JobSerializer.ToBsonDocument(job);
    Logging.Log().Debug("Inserting job into MongoDB queue: {document}", JobSerializer.ToJson(job));

    // Resolve the collection and insert in one chain.
    this.client.GetDatabase(DatabaseName)
        .GetCollection<BsonDocument>(CollectionName)
        .InsertOne(document);

    Logging.Log().Debug("{document} inserted into MongoDB queue.", document);
}
/// <summary>
/// Attempts to deserialize the given job type name and data into a concrete <see cref="IJob"/>.
/// </summary>
/// <param name="typeName">The name of the job type to instantiate; must not be null or empty.</param>
/// <param name="data">The serialized job data; null/whitespace is treated as an empty JSON object.</param>
/// <param name="job">Receives the deserialized job on success, null on failure.</param>
/// <param name="errorMessage">Receives a description of the failure; empty on success.</param>
/// <returns>True if the job type validated and deserialized, false otherwise.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="typeName"/> is null or empty.</exception>
protected virtual bool ValidateJobType(string typeName, string data, out IJob job, out string errorMessage)
{
    bool success = false;
    job = null;
    errorMessage = string.Empty;

    if (string.IsNullOrEmpty(typeName))
    {
        throw new ArgumentNullException("typeName", "typeName cannot be empty.");
    }

    // Treat missing data as an empty JSON object so parameterless jobs still validate.
    data = (data ?? string.Empty).Trim();

    if (string.IsNullOrEmpty(data))
    {
        data = "{}";
    }

    try
    {
        job = JobSerializer.Deserialize(typeName, data);
        success = true;
    }
    catch (ArgumentException)
    {
        errorMessage = "Job type contains invalid type syntax or does not implement IJob.";
    }
    catch (TargetInvocationException)
    {
        errorMessage = "Job type's class initializer threw an exception.";
    }
    catch (TypeLoadException)
    {
        errorMessage = "Failed to load job type.";
    }
    catch (FileNotFoundException)
    {
        errorMessage = "Job type or one of its dependencies was not found.";
    }
    catch (FileLoadException)
    {
        errorMessage = "Job type or one of its dependencies could not be loaded.";
    }
    catch (BadImageFormatException)
    {
        // Fixed: original message was garbled ("Job type's assembly that could not be loaded...").
        errorMessage = "Job type's assembly could not be loaded into the current runtime.";
    }

    return success;
}
/// <summary>
/// Ensures a started worker dequeues the pending record, deletes it from the
/// queue, and creates a matching working record.
/// </summary>
public void WorkerDequeue()
{
    // Arrange: one queued record wrapping a test job, and "no signal" state.
    IJob job = new TestJob() { Id = Guid.NewGuid() };
    SignalsRecord signals = new SignalsRecord() { QueueNames = "*", WorkerSignal = WorkerSignal.None, WorkingSignal = WorkingSignal.None };
    QueueRecord queued = new QueueRecord()
    {
        Id = 12,
        ApplicationName = BlueCollarSection.Section.ApplicationName,
        Data = JsonConvert.SerializeObject(job),
        JobName = job.Name,
        JobType = JobSerializer.GetTypeName(job.GetType()),
        QueuedOn = DateTime.UtcNow,
        QueueName = "*",
        TryNumber = 1
    };

    // Mock the repository so the worker dequeues our record.
    var transaction = new Mock<IDbTransaction>();
    var repository = new Mock<IRepository>();
    repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object);
    repository.Setup(r => r.BeginTransaction(It.IsAny<IsolationLevel>())).Returns(transaction.Object);
    repository.Setup(r => r.GetQueued(It.IsAny<string>(), It.IsAny<QueueNameFilters>(), It.IsAny<DateTime>(), It.IsAny<IDbTransaction>())).Returns(queued);
    repository.Setup(r => r.GetWorkingSignals(It.IsAny<long>(), It.IsAny<long?>(), It.IsAny<IDbTransaction>())).Returns(signals);

    var factory = new Mock<IRepositoryFactory>();
    factory.Setup(f => f.Create()).Returns(repository.Object);

    var logger = new Mock<ILogger>();

    using (Worker worker = new Worker(BlueCollarSection.Section.ApplicationName, 1, "Test Worker", null, 1, false, factory.Object, logger.Object))
    {
        worker.Start();

        // Let the worker's loop run at least one dequeue cycle.
        Thread.Sleep(1500);
    }

    // Assert: the record was fetched, removed from the queue, and moved to "working".
    repository.Verify(r => r.GetQueued(BlueCollarSection.Section.ApplicationName, It.IsAny<QueueNameFilters>(), It.IsAny<DateTime>(), It.IsAny<IDbTransaction>()));
    repository.Verify(r => r.DeleteQueued(12, It.IsAny<IDbTransaction>()));
    repository.Verify(r => r.CreateWorking(It.Is<WorkingRecord>(w => w.ApplicationName == BlueCollarSection.Section.ApplicationName && w.WorkerId == 1), It.IsAny<IDbTransaction>()));
}
/// <summary>
/// Removes the next job from the file-system queue and returns it,
/// or returns null when the queue is empty.
/// </summary>
public IJob<IJobData> Pull()
{
    // Guard: nothing queued.
    if (jobs.Count == 0)
    {
        return null;
    }

    var entry = jobs.First();
    var deserialized = JobSerializer.Deserialize(entry.Value);

    // Remove the backing file and the in-memory index entry together.
    File.Delete(Path.Combine(this.queuePath, entry.Key + ".json"));
    jobs.Remove(entry.Key);

    return deserialized;
}
/// <summary>
/// Ensures RefreshSchedules() pulls the schedule list from the repository.
/// </summary>
public void SchedulerRefreshSchedules()
{
    // Arrange: one enabled daily schedule with a single scheduled job.
    ScheduleRecord schedule = new ScheduleRecord()
    {
        ApplicationName = BlueCollarSection.Section.ApplicationName,
        Enabled = true,
        Id = 1,
        Name = "Test",
        QueueName = "*",
        RepeatType = ScheduleRepeatType.Days,
        RepeatValue = 1,
        StartOn = DateTime.UtcNow.FloorWithSeconds()
    };

    ScheduledJobRecord scheduledJob = new ScheduledJobRecord() { Data = @"{""SleepDuration"":1000}", Id = 1, JobType = JobSerializer.GetTypeName(typeof(TestJob)), Schedule = schedule, ScheduleId = 1 };
    schedule.ScheduledJobs.Add(scheduledJob);

    // Mock the repository to return that single schedule.
    var transaction = new Mock<IDbTransaction>();
    var repository = new Mock<IRepository>();
    repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object);
    repository.Setup(r => r.GetSchedules(BlueCollarSection.Section.ApplicationName, It.IsAny<IDbTransaction>())).Returns(new ScheduleRecord[] { schedule });

    var factory = new Mock<IRepositoryFactory>();
    factory.Setup(f => f.Create()).Returns(repository.Object);

    var logger = new Mock<ILogger>();

    Scheduler scheduler = new Scheduler(1, BlueCollarSection.Section.ApplicationName, QueueNameFilters.Any(), 1, factory.Object, logger.Object);

    // No schedules are loaded until the first explicit refresh.
    Assert.AreEqual(0, scheduler.Schedules.Count());
    scheduler.RefreshSchedules();
    Assert.AreEqual(1, scheduler.Schedules.Count());
}
/// <summary>
/// Deserializes the job list from <see cref="MDPath"/> and writes it back out
/// in the requested format: JSON, XML, or Markdown (the default).
/// </summary>
protected override void ProcessRecord()
{
    List<Job> jobList = JobSerializer.Deserialize(MDPath);

    if (Json)
    {
        // Save and restore the shared indentation flag: the original reset it
        // to false unconditionally (clobbering any prior value) and leaked the
        // flag if Serialize threw.
        bool previousIndented = Item.JsonIndented;
        Item.JsonIndented = JsonIndented;

        try
        {
            WriteObject(JobSerializer.Serialize(jobList, Item.EXTENSION_JSON));
        }
        finally
        {
            Item.JsonIndented = previousIndented;
        }
    }
    else if (Xml)
    {
        WriteObject(JobSerializer.Serialize(jobList, Item.EXTENSION_XML));
    }
    else
    {
        WriteObject(JobSerializer.Serialize(jobList, Item.EXTENSION_MD));
    }
}
/// <summary>
/// Serializes the given job and appends it to the file-system queue under a
/// freshly generated unique identifier.
/// </summary>
/// <param name="job">The job to enqueue.</param>
public void Push(IJob<IJobData> job)
{
    // Generate an id not already present in the in-memory index.
    Guid id = Guid.NewGuid();
    while (jobs.ContainsKey(id))
    {
        id = Guid.NewGuid();
    }

    string json = JobSerializer.ToJson(job);
    Logging.Log().Debug("Inserting job into FileSystem queue: {json}", json);

    this.jobs.Add(id, json);

    // Persist the payload alongside the in-memory entry.
    File.WriteAllText(Path.Combine(this.queuePath, id.ToString() + ".json"), json);
}
/// <summary>
/// Returns a human-readable description of this configuration: hostname,
/// the port when non-default, and any configured serializer and logger.
/// </summary>
public override string ToString()
{
    var description = new StringBuilder();
    description.Append(Hostname);

    // Only include the port when it differs from the default (11300).
    if (Port != 11300)
    {
        description.Append(":" + Port);
    }

    if (JobSerializer != null)
    {
        description.Append("; JobSerializer=" + JobSerializer.ToString());
    }

    if (Logger != null)
    {
        description.Append("; Logger=" + Logger.ToString());
    }

    return description.ToString();
}
/// <summary>
/// Atomically claims the next enqueued job in MongoDB (flipping its status to
/// in-progress) and returns it, or null when nothing is enqueued.
/// </summary>
public IJob<IJobData> Pull()
{
    var collection = this.client.GetDatabase(DatabaseName).GetCollection<BsonDocument>(CollectionName);

    // FindOneAndUpdate is atomic, so two pullers cannot claim the same document.
    var claimFilter = Builders<BsonDocument>.Filter.Eq("Status", JobStatus.Enqueued.ToString());
    var claimUpdate = Builders<BsonDocument>.Update.Set("Status", JobStatus.InProgress.ToString());
    var claimed = collection.FindOneAndUpdate(claimFilter, claimUpdate);

    if (claimed == null)
    {
        return null;
    }

    // The Mongo _id is not part of the job JSON; strip it and carry it over manually.
    string id = claimed.GetElement("_id").Value.ToString();
    claimed.Remove("_id");

    var result = JobSerializer.Deserialize(claimed.ToJson());
    result.Id = id;
    return result;
}
/// <summary>
/// Benchmarks dequeue + execute throughput: feeds 1,000 queued jobs (10ms each)
/// through a single worker and reports the elapsed wall-clock time.
/// </summary>
public void BenchmarkDequeueAndExecute1000Jobs()
{
    // Signaled by the mock repository once the last record is dequeued.
    ManualResetEvent handle = new ManualResetEvent(false);
    Queue<QueueRecord> queue = new Queue<QueueRecord>();
    TestJob job = new TestJob() { SleepDuration = 10 };
    string typeName = JobSerializer.GetTypeName(job.GetType());

    // Pre-build 1,000 queue records, each with a fresh job id.
    for (int i = 0; i < 1000; i++)
    {
        job.Id = Guid.NewGuid();
        queue.Enqueue(
            new QueueRecord()
            {
                Id = i + 1,
                ApplicationName = BlueCollarSection.Section.ApplicationName,
                Data = JobSerializer.Serialize(job),
                JobName = job.Name,
                JobType = typeName,
                QueuedOn = DateTime.UtcNow,
                QueueName = "*",
                TryNumber = 1
            });
    }

    SignalsRecord signals = new SignalsRecord() { QueueNames = "*", WorkerSignal = WorkerSignal.None, WorkingSignal = WorkingSignal.None };

    // Mock the repository: serve records from the in-memory queue and set the
    // handle when it drains, so the test knows when to stop the worker.
    var transaction = new Mock<IDbTransaction>();
    var repository = new Mock<IRepository>();
    repository.Setup(r => r.BeginTransaction()).Returns(transaction.Object);
    repository.Setup(r => r.BeginTransaction(It.IsAny<IsolationLevel>())).Returns(transaction.Object);
    repository.Setup(r => r.CreateWorking(It.IsAny<WorkingRecord>(), It.IsAny<IDbTransaction>())).Returns((WorkingRecord r, IDbTransaction t) => { r.Id = 1; return(r); });
    repository.Setup(r => r.GetWorkingSignals(It.IsAny<long>(), It.IsAny<long?>(), It.IsAny<IDbTransaction>())).Returns(signals);
    repository.Setup(r => r.GetQueued(It.IsAny<string>(), It.IsAny<QueueNameFilters>(), It.IsAny<DateTime>(), It.IsAny<IDbTransaction>()))
        .Returns(
            () =>
            {
                var r = queue.Dequeue();

                if (queue.Count == 0)
                {
                    handle.Set();
                }

                return(r);
            });

    var factory = new Mock<IRepositoryFactory>();
    factory.Setup(f => f.Create()).Returns(repository.Object);

    var logger = new Mock<ILogger>();
    Stopwatch stopwatch = new Stopwatch();

    using (Worker worker = new Worker(BlueCollarSection.Section.ApplicationName, 1, "Test Worker", QueueNameFilters.Any(), 1, false, factory.Object, logger.Object))
    {
        stopwatch.Start();
        worker.Start();

        // Block until the mock queue is drained, then stop timing.
        handle.WaitOne();
        worker.Stop(false);
        stopwatch.Stop();
    }

    this.TestContext.WriteLine("1,000 jobs with 10ms execution times were dequeued and executed in {0:N3}s.", stopwatch.Elapsed.TotalSeconds);
}
/// <summary>
/// Runs the named scenario for the requested number of iterations: starts each
/// dependent job on its agent endpoints, waits for or stops them, downloads
/// traces and assets, displays results, and persists them to file and/or SQL.
/// </summary>
/// <returns>Zero — both on success and when skipped due to an OS constraint.</returns>
private static async Task<int> Run(
    Configuration configuration,
    string scenarioName,
    string session,
    JObject commandLineVariables,
    int iterations,
    int exclude,
    string shutdownEndpoint,
    TimeSpan span,
    //List<string> downloadFiles,
    //bool fetch,
    //string fetchDestination,
    //bool collectR2RLog,
    // string traceDestination,
    // CommandOption scriptFileOption,
    CommandOption markdownOption
    )
{
    if (_sqlConnectionStringOption.HasValue())
    {
        await JobSerializer.InitializeDatabaseAsync(_sqlConnectionStringOption.Value(), _tableName);
    }

    // Storing the list of services to run as part of the selected scenario
    var dependencies = configuration.Scenarios[scenarioName].Select(x => x.Key).ToArray();

    var results = new List<Statistics>();

    Log.Write($"Running session '{session}' with description '{_descriptionOption.Value()}'");

    for (var i = 1; i <= iterations; i++)
    {
        if (iterations > 1)
        {
            Log.Write($"Job {i} of {iterations}");
        }

        var jobsByDependency = new Dictionary<string, List<JobConnection>>();

        foreach (var jobName in dependencies)
        {
            var service = configuration.Jobs[jobName];
            service.DriverVersion = 2;

            // One connection per configured agent endpoint.
            var jobs = service.Endpoints.Select(endpoint => new JobConnection(service, new Uri(endpoint))).ToList();

            jobsByDependency.Add(jobName, jobs);

            foreach (var job in jobs)
            {
                // NOTE(review): an OS mismatch aborts the whole run (returns 0),
                // not just this job.
                if (!String.IsNullOrEmpty(service.Options.RequiredOperatingSystem))
                {
                    var info = await job.GetInfoAsync();
                    var os = info["os"]?.ToString();

                    if (!String.Equals(os, service.Options.RequiredOperatingSystem, StringComparison.OrdinalIgnoreCase))
                    {
                        Log.Write($"Scenario skipped as the agent doesn't match the OS constraint ({service.Options.RequiredOperatingSystem}) on service '{jobName}'");
                        return(0);
                    }
                }
            }

            var variables = MergeVariables(configuration.Variables, service.Variables, commandLineVariables);

            // Format arguments
            if (FluidTemplate.TryParse(service.Arguments, out var template))
            {
                service.Arguments = template.Render(new TemplateContext { Model = variables });
            }

            // Start this group of jobs
            await Task.WhenAll(
                jobs.Select(job =>
                {
                    // Start server
                    return(job.StartAsync(
                        jobName,
                        _outputArchiveOption,
                        _buildArchiveOption,
                        _outputFileOption,
                        _buildFileOption
                        ));
                })
            );

            foreach (var job in jobs)
            {
                job.StartKeepAlive();
            }

            if (service.WaitForExit)
            {
                // Wait for all clients to stop
                while (jobs.Any(client => client.Job.State != ServerState.Stopped && client.Job.State != ServerState.Failed))
                {
                    // Refresh the local state
                    foreach (var job in jobs)
                    {
                        await job.TryUpdateStateAsync();
                    }

                    await Task.Delay(1000);
                }

                // Stop a blocking job
                await Task.WhenAll(jobs.Select(job => job.StopAsync()));
                await Task.WhenAll(jobs.Select(job => job.DownloadAssetsAsync(jobName)));
                await Task.WhenAll(jobs.Select(job => job.DeleteAsync()));
            }
        }

        // Download traces, before the jobs are stopped
        foreach (var jobName in dependencies)
        {
            var service = configuration.Jobs[jobName];
            var jobConnections = jobsByDependency[jobName];

            foreach (var jobConnection in jobConnections)
            {
                // Download trace
                if (jobConnection.Job.DotNetTrace)
                {
                    try
                    {
                        var traceDestination = jobConnection.Job.Options.TraceOutput;

                        if (String.IsNullOrWhiteSpace(traceDestination))
                        {
                            traceDestination = jobName;
                        }

                        var traceExtension = ".nettrace";

                        // Timestamp the filename unless the caller already named a .nettrace file.
                        if (!traceDestination.EndsWith(traceExtension, StringComparison.OrdinalIgnoreCase))
                        {
                            traceDestination = traceDestination + "." + DateTime.Now.ToString("MM-dd-HH-mm-ss") + traceExtension;
                        }

                        Log.Write($"Collecting trace file '{traceDestination}' ...");

                        await jobConnection.DownloadDotnetTrace(traceDestination);
                    }
                    catch (Exception e)
                    {
                        // NOTE(review): message says "published assets" but this is the trace download path.
                        Log.Write($"Error while fetching published assets for '{jobName}'");
                        Log.Verbose(e.Message);
                    }
                }
            }
        }

        // Stop all jobs in reverse dependency order (clients first)
        foreach (var jobName in dependencies)
        {
            var service = configuration.Jobs[jobName];

            if (!service.WaitForExit)
            {
                var jobs = jobsByDependency[jobName];

                await Task.WhenAll(jobs.Select(job => job.StopAsync()));
                await Task.WhenAll(jobs.Select(job => job.DownloadAssetsAsync(jobName)));
                await Task.WhenAll(jobs.Select(job => job.DeleteAsync()));
            }
        }

        // Display results
        foreach (var jobName in dependencies)
        {
            var service = configuration.Jobs[jobName];
            var jobConnections = jobsByDependency[jobName];

            if (!service.Options.DiscardResults && !service.Options.DisplayOutput && !service.Options.DisplayBuild)
            {
                Log.Quiet("");
                Log.Quiet($"{jobName}");
                Log.Quiet($"-------");
            }

            foreach (var jobConnection in jobConnections)
            {
                // Convert any json result to an object
                NormalizeResults(jobConnections);

                if (!service.Options.DiscardResults)
                {
                    WriteMeasures(jobConnection);
                }

                // Display output log
                if (jobConnection.Job.Options.DisplayOutput)
                {
                    Log.Quiet("");
                    Log.Quiet("Output:");
                    Log.Quiet("");
                    Log.DisplayOutput(jobConnection.Job.Output);
                }

                // Display build log
                if (jobConnection.Job.Options.DisplayBuild)
                {
                    try
                    {
                        Log.Quiet("");
                        Log.Quiet("Build:");
                        Log.Quiet("");
                        Log.DisplayOutput(await jobConnection.DownloadBuildLog());
                    }
                    catch (Exception e)
                    {
                        Log.Write($"Error while downloading build logs");
                        Log.Verbose(e.Message);
                    }
                }
            }
        }

        var jobResults = await CreateJobResultsAsync(configuration, dependencies, jobsByDependency);

        // Attach any command-line "key=value" properties to the results.
        foreach (var property in _propertyOption.Values)
        {
            var segments = property.Split('=', 2);
            jobResults.Properties[segments[0]] = segments[1];
        }

        // Save results
        if (_outputOption.HasValue())
        {
            var filename = _outputOption.Value();

            if (File.Exists(filename))
            {
                File.Delete(filename);
            }

            await File.WriteAllTextAsync(filename, JsonConvert.SerializeObject(jobResults, Formatting.Indented, new JsonSerializerSettings { ContractResolver = new CamelCasePropertyNamesContractResolver() }));

            Log.Write("", notime: true);
            Log.Write($"Results saved in '{new FileInfo(filename).FullName}'", notime: true);
        }

        // Store data
        if (_sqlConnectionStringOption.HasValue())
        {
            await JobSerializer.WriteJobResultsToSqlAsync(jobResults, _sqlConnectionStringOption.Value(), _tableName, session, _scenarioOption.Value(), _descriptionOption.Value());
        }
    }

    return(0);
}