/// <summary>
/// Verifies that a job can be created in normal search mode and then
/// updated to realtime search mode.
/// </summary>
public void JobSearchMode()
{
    var service = Connect();

    var jobArgs = new JobArgs(Query)
    {
        SearchMode = SearchMode.Normal
    };

    Job job = service.CreateJobAsync(jobArgs).Result;
    Assert.NotNull(job);

    // Switch the running job over to realtime mode.
    jobArgs.SearchMode = SearchMode.Realtime;
    job.UpdateJobArgs(jobArgs).Wait();
    Assert.NotNull(job);

    job.CancelAsync().Wait();
}
/// <summary>
/// Does a real time search: runs a realtime job over a five-minute window
/// of the _internal index and prints five one-second-apart snapshots of
/// the preview results.
/// </summary>
/// <param name="service">The service.</param>
/// <returns>A task representing the asynchronous operation.</returns>
static async Task SearchRealTime(Service service)
{
    await service.LoginAsync("admin", "changeme");

    // Realtime window is 5 minutes
    var jobArgs = new JobArgs("search index=_internal")
    {
        SearchMode = SearchMode.Realtime,
        EarliestTime = "rt-5m",
        LatestTime = "rt",
    };

    Job job = await service.CreateJobAsync(jobArgs);

    //this sleep should be removed if DVPL-4503 is fixed.
    //Thread.Sleep(5000);

    for (var i = 0; i < 5; i++)
    {
        System.Console.WriteLine("============================= Snapshot {0}=================================", i);

        using (SearchResults searchResults = await job.GetSearchResultsPreviewAsync())
        {
            // A realtime search never completes, so the preview must not be final.
            System.Diagnostics.Debug.Assert(!searchResults.AreFinal);
            System.Console.WriteLine("searchResults count:{0}", searchResults.Count());
        }

        // FIX: await Task.Delay instead of Thread.Sleep — do not block a
        // thread-pool thread inside an async method.
        await Task.Delay(1000);
    }

    await job.CancelAsync();
    await service.LogoffAsync();
}
/// <summary>
/// Verifies that a job accepts an update for each execution mode value.
/// </summary>
public void JobExecutionMode()
{
    var service = Connect();

    var jobArgs = new JobArgs(Query)
    {
        ExecutionMode = ExecutionMode.Blocking
    };

    Job job = service.CreateJobAsync(jobArgs).Result;
    Assert.NotNull(job);

    // Cycle the same job through the remaining execution modes, in order.
    foreach (var mode in new[] { ExecutionMode.None, ExecutionMode.Normal, ExecutionMode.Oneshot })
    {
        jobArgs.ExecutionMode = mode;
        job.UpdateJobArgs(jobArgs).Wait();
        Assert.NotNull(job);
    }

    job.CancelAsync().Wait();
}
/// <summary>
/// Demonstrates oneshot searches: first from a raw query string, then
/// from a JobArgs definition.
/// </summary>
/// <param name="service">The service.</param>
/// <returns>A task representing the asynchronous operation.</returns>
static async Task OneshotSearch(Service service)
{
    await service.LoginAsync("admin", "changeme");

    // Simple oneshot search from a raw query string.
    using (SearchResults searchResults = await service.SearchOneshotAsync("search index=_internal | head 5"))
    {
        foreach (Result record in searchResults)
        {
            Console.WriteLine(record);
        }
    }

    // The same search, defined through JobArgs.
    var args = new JobArgs("search index=_internal | head 5")
    {
        AutoCancel = 0,
    };

    using (SearchResults searchResults = await service.SearchOneshotAsync(args))
    {
        foreach (Result record in searchResults)
        {
            Console.WriteLine(record);
        }
    }

    await service.LogoffAsync();
}
/// <summary>
/// Restores the original context user by leaving the user switch, but only
/// when a context user was set on the job's options.
/// </summary>
/// <param name="args">The arguments.</param>
public static void RestoreContextUser(JobArgs args)
{
    var contextUser = args.Job.Options.ContextUser;

    if (contextUser == null)
    {
        // No user switch was active; nothing to restore.
        return;
    }

    UserSwitcher.Exit();
}
/// <summary>
/// Adds a mobile-content job through JobProvider.
/// NOTE(review): this test ends with an unconditional Assert.Fail() —
/// presumably a placeholder so the test cannot silently pass while under
/// development; confirm the intent before relying on this test.
/// </summary>
public void AddJobTest()
{
    // Fixed sample payload for the job (tenant, sender/receiver, content,
    // external id, and a hard-coded scheduled run time).
    MobileContent mobileContent = new MobileContent()
    {
        TenantId = 100002,
        UserId = 112664957,
        ToUserId = 112664957,
        Content = "hello",
        OId = "6cd57b3b-dab9-44f8-be16-9ae974b8e523",
        RunTime = new DateTime(2019, 1, 17, 20, 0, 0)
    };

    // Wrap the payload in job arguments; the content is serialized to JSON
    // and carried as the job's Args.
    JobArgs args = new JobArgs()
    {
        TenantId = mobileContent.TenantId,
        OperatorId = mobileContent.UserId,
        JobKey = mobileContent.OId,
        Job = new TestJob() { Args = mobileContent.ToJson() },
        RunTime = mobileContent.RunTime,
        LimitSeconds = 10,
        JobAppName = "UPaaSDemo"
    };

    JobProvider.AddJob(args);

    // Unconditional failure — see NOTE(review) in the summary above.
    Assert.Fail();
}
/// <summary>
/// Execute selected job
/// </summary>
/// <param name="args">Consist parameter for executing selected job</param>
static void Main(string[] args)
{
    // Nothing to do without at least a job-type argument.
    if (args.Length == 0)
    {
        return;
    }

    // Make sure our event-log source exists before any logging happens.
    if (!EventLog.SourceExists(logSource))
    {
        EventLog.CreateEventSource(logSource, "Application");
    }

    // Translate the command-line arguments into a job definition, build
    // the job, run it, and send out notifications for the run.
    JobArgs jobArgs = JobsExtension.BuildJobArgs(args);
    IJob job = JobsExtension.ConfigureJob(jobArgs);

    var jobStatus = job.Process();
    job.ProcessNotifications(jobStatus);
}
/// <summary>
/// Verifies that an invalid search causes an HTTP 400 Bad Request error;
/// any other failure is rethrown. Cleans up the job if one was created.
/// </summary>
public void BadOutputMode()
{
    var service = Connect();
    JobArgs jobArgs = new JobArgs("invalidpart" + Query);

    Job job = null;

    try
    {
        job = service.CreateJobAsync(jobArgs).Result;
    }
    catch (Exception e)
    {
        // FIX: e.InnerException may be null (the original dereferenced it
        // unconditionally and could throw NullReferenceException), and
        // ToLower()+Contains is culture-sensitive; use an ordinal,
        // case-insensitive search instead.
        string message = (e.InnerException != null) ? e.InnerException.Message : null;

        if (message == null ||
            message.IndexOf("400: bad request", StringComparison.OrdinalIgnoreCase) < 0)
        {
            throw;
        }
    }
    finally
    {
        if (job != null)
        {
            job.CancelAsync().Wait();
        }
    }
}
/// <summary>
/// Verifies that the preview result count is not capped at the default of
/// 100 when a search yields 101 results.
/// </summary>
public async Task JobPreviewDefaultsToAll()
{
    using (var service = await SdkHelper.CreateService())
    {
        var jobArgs = new JobArgs();
        var job = await service.Jobs.CreateAsync("search index=_* | head 101", args: jobArgs);

        // Wait for the job to finish, retrying with progressively longer
        // timeouts (1s, 2s, 3s, 4s).
        foreach (var delay in new[] { 1000, 2000, 3000, 4000 })
        {
            try
            {
                await job.TransitionAsync(DispatchState.Done, delay);
                break;
            }
            catch (TaskCanceledException)
            {
                // Timed out; retry with the next, longer timeout.
            }
        }

        using (SearchResultStream stream = await job.GetSearchPreviewAsync())
        {
            // Is the result preview count greater than the default of 100?
            Assert.Equal(101, job.ResultPreviewCount);
        }

        await job.CancelAsync();
    }
}
/// <summary>
/// Parses a single crontab-style line into a JobArgs instance. Returns
/// null for blank lines, '#' comments, unknown '@' shortcuts, and
/// definitions whose schedule fields cannot be extracted.
/// </summary>
/// <param name="line">The raw crontab line.</param>
/// <returns>The parsed job definition, or null.</returns>
public static JobArgs DoParseJobLine(string line)
{
    var def = (line ?? "").Trim().Replace("\t", " ");

    // Skip blank lines and comments.
    if (string.IsNullOrWhiteSpace(def) || def[0] == '#')
    {
        return null;
    }

    var result = new JobArgs();

    // '@' shortcuts: @service and @reboot are complete definitions; the
    // rest expand into an equivalent five-field schedule prefix.
    if (def[0] == '@')
    {
        var spec = StringParser.ExtractWord(ref def).ToLower();

        switch (spec)
        {
            case "@service":
                result.Service = true;
                result.Command = def;
                return result;

            case "@reboot":
                result.Reboot = true;
                result.Command = def;
                return result;

            case "@yearly":
            case "@annually":
                def = "0 0 1 1 * " + def;
                break;

            case "@monthly":
                def = "0 0 1 * * " + def;
                break;

            case "@weekly":
                def = "0 0 * * 0 " + def;
                break;

            case "@daily":
                def = "0 0 * * * " + def;
                break;

            case "@hourly":
                def = "0 * * * * " + def;
                break;

            default:
                // Unknown shortcut.
                return null;
        }
    }

    try
    {
        // Five schedule fields, then the remainder is the command.
        result.Minute = StringParser.ExtractWord(ref def);
        result.Hour = StringParser.ExtractWord(ref def);
        result.Day = StringParser.ExtractWord(ref def);
        result.Month = StringParser.ExtractWord(ref def);
        result.Weekday = StringParser.ExtractWord(ref def);
        result.Command = def;
        return result;
    }
    catch (Exception)
    {
        // Malformed schedule field.
        return null;
    }
}
/// <summary>
/// Translates command-line arguments into a JobArgs definition.
/// Expected layout: args[0] = job type token; args[1] = first-announced
/// date (MM/dd/yyyy, announcements only); args[2] = triggered-by, where
/// '_' stands for a space; args[3] = file name, where '%' stands for a
/// space.
/// </summary>
/// <param name="args">The raw command-line arguments (args[0] required).</param>
/// <returns>The populated job arguments.</returns>
public static JobArgs BuildJobArgs(string[] args)
{
    JobArgs jobArgs = new JobArgs()
    {
        TriggeredBy = args.Length >= 3 ? args[2].Replace('_', ' ') : "SYSTEM",
        FileName = args.Length >= 4 ? args[3].Replace('%', ' ') : null,
    };

    // FIX: removed the redundant ToString() on a string, and use
    // ToLowerInvariant() — the job type is a machine token, so comparison
    // must not depend on the current culture (e.g. Turkish 'I' casing).
    switch (args[0].ToLowerInvariant())
    {
        case "announcement":
        case "ann":
            jobArgs.JobType = Models.JobType.Announceemnt;

            if (args.Length >= 2)
            {
                jobArgs.FirstAnnouncedDate = DateTime.ParseExact(
                    args[1], "MM/dd/yyyy", System.Globalization.CultureInfo.InvariantCulture);
            }

            break;

        case "pipelineorder":
        case "po":
            jobArgs.JobType = Models.JobType.PipelineOrder;
            break;

        case "pricereport":
        case "pr":
            jobArgs.JobType = Models.JobType.PriceReport;
            break;

        case "qcreport":
        case "qc":
            jobArgs.JobType = Models.JobType.QCReport;
            break;

        case "titlereport":
        case "tr":
            jobArgs.JobType = Models.JobType.TitleReport;
            break;

        // An unrecognized token intentionally leaves JobType at its
        // default value, matching the original behavior.
    }

    return jobArgs;
}
/// <summary>
/// Creates a job with default search-mode arguments, verifies it exists,
/// and cancels it.
/// </summary>
public void BadSearchModeExport()
{
    var service = Connect();
    var jobArgs = new JobArgs(Query);

    Job job = service.CreateJobAsync(jobArgs).Result;
    Assert.NotNull(job);

    job.CancelAsync().Wait();
}
/// <summary>
/// Runs a job for every value of the SearchMode enumeration.
/// </summary>
public void JobSearchModeArgument()
{
    // FIX: removed an unused local (the original constructed a JobArgs
    // from Query here that was never referenced).
    var type = typeof(SearchMode);

    RunJobForEachEnum(
        type,
        (mode) => new JobArgs(Query)
        {
            SearchMode = (SearchMode)Enum.Parse(type, mode)
        });
}
/// <summary>
/// Verifies that a JobArgs constructed from a search string serializes
/// with that search set and contributes exactly one "search" argument.
/// </summary>
void CanConstruct()
{
    var expectedArguments = new Argument[]
    {
        new Argument("search", "some unchecked search string")
    };

    // Expected serializations: element [0] is the default form with
    // search=null, element [1] has the search string set.
    // NOTE(review): only element [1] is asserted below; element [0]
    // appears to be kept for reference only — confirm.
    string[] expectedString = new string[]
    {
        "auto_cancel=0; auto_finalize_ec=0; auto_pause=0; earliest_time=null; enable_lookups=t; exec_mode=normal; force_bundle_replication=f; id=null; index_earliest=null; index_latest=null; latest_time=null; max_count=10000; max_time=0; namespace=null; now=null; reduce_freq=0; reload_macros=t; remote_server_list=null; reuse_max_seconds_ago=0; rf=null; rt_blocking=f; rt_indexfilter=f; rt_maxblocksecs=60; rt_queue_size=10000; search=null; search_listener=null; search_mode=normal; spawn_process=t; status_buckets=0; sync_bundle_replication=f; time_format=null; timeout=86400",
        "auto_cancel=0; auto_finalize_ec=0; auto_pause=0; earliest_time=null; enable_lookups=t; exec_mode=normal; force_bundle_replication=f; id=null; index_earliest=null; index_latest=null; latest_time=null; max_count=10000; max_time=0; namespace=null; now=null; reduce_freq=0; reload_macros=t; remote_server_list=null; reuse_max_seconds_ago=0; rf=null; rt_blocking=f; rt_indexfilter=f; rt_maxblocksecs=60; rt_queue_size=10000; search=some unchecked search string; search_listener=null; search_mode=normal; spawn_process=t; status_buckets=0; sync_bundle_replication=f; time_format=null; timeout=86400"
    };

    string search = "some unchecked search string";
    var args = new JobArgs(search);

    Assert.Equal(expectedString[1], args.ToString());
    Assert.Equal(expectedArguments, args);
}
/// <summary>
/// Creates the same search job twice, waiting for each creation call; the
/// same argument set is submitted twice to confirm it can be reused.
/// </summary>
public void Search()
{
    Service service = Connect();
    var jobArgs = new JobArgs(Query);

    service.CreateJobAsync(jobArgs).Wait();
    service.CreateJobAsync(jobArgs).Wait();
}
/// <summary>
/// Verifies that the result count is not capped at the default of 100
/// when a search yields 101 results.
/// </summary>
public async Task JobResultsDefaultsToAll()
{
    using (var service = await SdkHelper.CreateService())
    {
        var jobArgs = new JobArgs();
        var job = await service.Jobs.CreateAsync("search index=_* | head 101", args: jobArgs);

        using (SearchResultStream stream = await job.GetSearchResultsAsync())
        {
            // Is the result count greater than the default of 100?
            Assert.Equal(101, job.ResultCount);
        }

        await job.CancelAsync();
    }
}
/// <summary>
/// Verifies that a job created in normal search mode can be updated to
/// realtime search mode.
/// </summary>
public async Task JobSearchMode()
{
    using (var service = await SdkHelper.CreateService())
    {
        var jobArgs = new JobArgs { SearchMode = SearchMode.Normal };

        Job job = await service.Jobs.CreateAsync(Search, args: jobArgs);
        Assert.NotNull(job);

        // Flip the same argument set to realtime and push the update.
        jobArgs.SearchMode = SearchMode.RealTime;
        bool updatedSnapshot = await job.UpdateAsync(jobArgs);
        Assert.True(updatedSnapshot);

        await job.CancelAsync();
    }
}
/// <summary>
/// Verifies that RemoteServerList round-trips a comma-separated server
/// list on both SearchExportArgs and JobArgs.
/// </summary>
public void RemoteServerList()
{
    const string servers = "first,second";

    var exportArgs = new SearchExportArgs("")
    {
        RemoteServerList = servers,
    };
    Assert.Equal("first,second", exportArgs.RemoteServerList);

    var jobArgs = new JobArgs("")
    {
        RemoteServerList = servers,
    };
    Assert.Equal("first,second", jobArgs.RemoteServerList);
}
/// <summary>
/// Verifies that a job created in normal search mode accepts an update to
/// realtime search mode.
/// </summary>
public async Task JobSearchMode()
{
    using (var service = await SdkHelper.CreateService())
    {
        var creationArgs = new JobArgs();
        creationArgs.SearchMode = SearchMode.Normal;

        Job job = await service.Jobs.CreateAsync(Search, args: creationArgs);
        Assert.NotNull(job);

        // Reuse the creation arguments for the realtime update.
        creationArgs.SearchMode = SearchMode.RealTime;
        bool snapshotUpdated = await job.UpdateAsync(creationArgs);
        Assert.True(snapshotUpdated);

        await job.CancelAsync();
    }
}
/// <summary>
/// Verifies that an array assigned to RemoteServerList serializes as a
/// comma-separated list on both JobArgs and JobExportArgs.
/// </summary>
public void RemoteServerList()
{
    const string ParamName = "remote_server_list";
    var servers = new[] { "first", "second" };

    var jobArgs = new JobArgs
    {
        RemoteServerList = servers,
    };
    Assert.AreEqual("first,second", jobArgs[ParamName]);

    var exportArgs = new JobExportArgs
    {
        RemoteServerList = servers,
    };
    Assert.AreEqual("first,second", exportArgs[ParamName]);
}
// NOTE(review): a large region of commented-out legacy tests
// (BadOutputModeExport, JobResultsOutputModeArgument, JobResultStream,
// JobResultsPreviewOutputModeArgument, JobEventsOutputModeArgument) was
// removed here; recover it from source control if it is ever needed.

/// <summary>
/// Run a job and a function on the job
/// for each enum value in an enum type.
/// </summary>
/// <param name="enumType">The enum type</param>
/// <param name="jobFunction">
/// A function for a job and an enum value
/// </param>
private void RunJobFuntionForEachEnum(
    Type enumType,
    Func<Job, string, SearchResults> jobFunction)
{
    var service = Connect();
    var jobArgs = new JobArgs(Query);

    ForEachEnum(
        enumType,
        (@enum) =>
        {
            // Start a fresh job for each enum value, apply the function
            // to it, then cancel the job.
            var job = this.RunWait(service, jobArgs);
            jobFunction(job, @enum);
            job.CancelAsync().Wait();
        });
}
/// <summary>
/// Creates realtime jobs one after another until the server reports one
/// as Queued, then cancels them all.
/// </summary>
public async Task QueuedSearchCreate()
{
    const string searchPrefix = "search index=_internal ";
    int i = 0;

    using (var service = await SdkHelper.CreateService())
    {
        List<Job> jobs = new List<Job>();
        Job job = null;

        do
        {
            JobArgs jobArgs = new JobArgs();
            jobArgs.SearchMode = SearchMode.RealTime;

            try
            {
                // Jobs should eventually be queued w/o waiting for them to get to running state
                job = await service.Jobs.Create(searchPrefix + i.ToString(), args : jobArgs);
                Assert.Equal(DispatchState.None, job.DispatchState);
                await job.GetAsync();
                jobs.Add(job);
                i++;
            }
            catch (Exception ex)
            {
                // Creation failed (e.g. the server refused another job);
                // log and stop creating more. NOTE(review): if this
                // happens on the very first iteration, 'job' is still
                // null and the assertions below will fail — presumably
                // intended to fail the test in that case; confirm.
                System.Diagnostics.Debug.WriteLine(ex.GetBaseException().ToString());
                break;
            }
        }
        while (job.DispatchState != DispatchState.Queued);

        Assert.NotNull(job);
        Assert.Equal(DispatchState.Queued, job.DispatchState);

        // Cleanup
        foreach (Job j in jobs)
        {
            await j.CancelAsync();
        }
    }

    // At least one job must have been created successfully.
    Assert.True(i > 0);
}
/// <summary>
/// Verifies that a default-constructed JobArgs serializes every parameter
/// as null and contributes no arguments.
/// </summary>
void CanConstructJobArgs()
{
    var args = new JobArgs();

    // Every known parameter, in serialization order; all are unset, so
    // each renders as "name=null".
    var parameterNames = new[]
    {
        "auto_cancel", "auto_finalize_ec", "auto_pause", "earliest_time",
        "enable_lookups", "force_bundle_replication", "id", "index_earliest",
        "index_latest", "latest_time", "max_count", "max_time", "namespace",
        "now", "reduce_freq", "reload_macros", "remote_server_list",
        "reuse_max_seconds_ago", "rf", "rt_blocking", "rt_indexfilter",
        "rt_maxblocksecs", "rt_queue_size", "search_listener", "search_mode",
        "spawn_process", "status_buckets", "sync_bundle_replication",
        "time_format", "timeout"
    };

    var expected = string.Join("; ", parameterNames.Select(name => name + "=null"));

    Assert.Equal(expected, args.ToString());
    Assert.Equal(0, args.Count());
}
/// <summary>
/// Runs a search-events request for every TruncationMode value.
/// </summary>
public async Task JobEventsTruncationModeArgument()
{
    using (var service = await SdkHelper.CreateService())
    {
        var jobArgs = new JobArgs();
        var enumType = typeof(TruncationMode);

        await ForEachEnum(enumType, async enumValue =>
        {
            var job = await service.Jobs.CreateAsync(Search, args: jobArgs);

            var eventArgs = new SearchEventArgs
            {
                TruncationMode = (TruncationMode)Enum.Parse(enumType, enumValue)
            };

            // Open (and immediately dispose) the event stream for this mode.
            using (SearchResultStream stream = await job.GetSearchEventsAsync(eventArgs))
            {
            }

            await job.CancelAsync();
        });
    }
}
// NOTE(review): a large block of commented-out legacy tests
// (BadOutputModeExport, JobResultsOutputModeArgument, JobResultStream,
// JobResultsPreviewOutputModeArgument, JobEventsOutputModeArgument) was
// removed here; see source control history if it is needed again.

/// <summary>
/// Run a job and a function on the job
/// for each enum value in an enum type.
/// </summary>
/// <param name="enumType">The enum type</param>
/// <param name="jobFunction">
/// A function for a job and an enum value
/// </param>
private void RunJobFuntionForEachEnum(
    Type enumType,
    Func<Job, string, SearchResults> jobFunction)
{
    var service = Connect();
    var sharedArgs = new JobArgs(Query);

    ForEachEnum(
        enumType,
        (@enum) =>
        {
            // One fresh job per enum value: run, apply, cancel.
            var job = this.RunWait(service, sharedArgs);
            jobFunction(job, @enum);
            job.CancelAsync().Wait();
        });
}
/// <summary>
/// Raises the "job:ended" event with an empty payload.
/// </summary>
/// <param name="args">The job arguments. NOTE(review): currently unused —
/// presumably kept for signature compatibility with other job event
/// raisers; confirm before removing.</param>
public static void RaiseJobEndedEvent(JobArgs args)
{
    Event.RaiseEvent("job:ended", new object[] { });
}
/// <summary>
/// Runs a oneshot search. NOTE(review): this is a contract/stub body — it
/// only validates the search argument and returns default; presumably
/// generated for code-contract or reference-assembly purposes rather than
/// real execution.
/// </summary>
/// <param name="search">The search string; must not be null.</param>
/// <param name="count">A result-count limit (semantics not visible here).</param>
/// <param name="args">Optional job arguments.</param>
/// <param name="customArgs">Optional custom job arguments.</param>
/// <returns>A task producing the search result stream (always default in this stub).</returns>
public Task<SearchResultStream> SearchOneShotAsync(string search, int count = 0, JobArgs args = null, CustomJobArgs customArgs = null)
{
    Contract.Requires<ArgumentNullException>(search != null);
    return default(Task<SearchResultStream>);
}
/// <summary>
/// Sets every JobArgs property and verifies both the serialized string
/// form and the generated Argument sequence.
/// </summary>
void CanSetEveryValue()
{
    var args = new JobArgs()
    {
        AutoCancel = 1,
        AutoFinalizeEventCount = 2,
        AutoPause = 3,
        EarliestTime = "some_unchecked_string",
        EnableLookups = false,
        ForceBundleReplication = true,
        Id = "some_unchecked_string",
        IndexEarliest = "some_unchecked_string",
        IndexLatest = "some_unchecked_string",
        LatestTime = "some_unchecked_string",
        MaxCount = 4,
        MaxTime = 5,
        Namespace = "some_unchecked_string",
        Now = "some_unchecked_string",
        RealTimeBlocking = true,
        RealTimeIndexFilter = true,
        RealTimeMaxBlockSeconds = 6,
        RealTimeQueueSize = 7,
        ReduceFrequency = 8,
        ReloadMacros = false,
        RemoteServerList = "some_unchecked_string",
        RequiredFieldList = new List<string>() { "some_unchecked_string", "some_other_uncheck_string" },
        ReuseMaxSecondsAgo = 9,
        SearchListener = "some_unchecked_string",
        SearchMode = SearchMode.RealTime,
        SpawnProcess = false,
        StatusBuckets = 10,
        SyncBundleReplication = true,
        TimeFormat = "some_unchecked_string",
        Timeout = 11
    };

    // Expected serialization: booleans render as 0/1 and the two-element
    // RequiredFieldList expands into two "rf" entries.
    Assert.Equal(
        "auto_cancel=1; " +
        "auto_finalize_ec=2; " +
        "auto_pause=3; " +
        "earliest_time=some_unchecked_string; " +
        "enable_lookups=0; " +
        "force_bundle_replication=1; " +
        "id=some_unchecked_string; " +
        "index_earliest=some_unchecked_string; " +
        "index_latest=some_unchecked_string; " +
        "latest_time=some_unchecked_string; " +
        "max_count=4; " +
        "max_time=5; " +
        "namespace=some_unchecked_string; " +
        "now=some_unchecked_string; " +
        "reduce_freq=8; " +
        "reload_macros=0; " +
        "remote_server_list=some_unchecked_string; " +
        "reuse_max_seconds_ago=9; " +
        "rf=some_unchecked_string; " +
        "rf=some_other_uncheck_string; " +
        "rt_blocking=1; " +
        "rt_indexfilter=1; " +
        "rt_maxblocksecs=6; " +
        "rt_queue_size=7; " +
        "search_listener=some_unchecked_string; " +
        "search_mode=realtime; " +
        "spawn_process=0; " +
        "status_buckets=10; " +
        "sync_bundle_replication=1; " +
        "time_format=some_unchecked_string; " +
        "timeout=11",
        args.ToString());

    // The same values expressed as the generated Argument sequence.
    Assert.Equal(new List<Argument>()
    {
        new Argument("auto_cancel", "1"),
        new Argument("auto_finalize_ec", "2"),
        new Argument("auto_pause", "3"),
        new Argument("earliest_time", "some_unchecked_string"),
        new Argument("enable_lookups", 0),
        new Argument("force_bundle_replication", 1),
        new Argument("id", "some_unchecked_string"),
        new Argument("index_earliest", "some_unchecked_string"),
        new Argument("index_latest", "some_unchecked_string"),
        new Argument("latest_time", "some_unchecked_string"),
        new Argument("max_count", "4"),
        new Argument("max_time", "5"),
        new Argument("namespace", "some_unchecked_string"),
        new Argument("now", "some_unchecked_string"),
        new Argument("reduce_freq", "8"),
        new Argument("reload_macros", 0),
        new Argument("remote_server_list", "some_unchecked_string"),
        new Argument("reuse_max_seconds_ago", "9"),
        new Argument("rf", "some_unchecked_string"),
        new Argument("rf", "some_other_uncheck_string"),
        new Argument("rt_blocking", 1),
        new Argument("rt_indexfilter", 1),
        new Argument("rt_maxblocksecs", "6"),
        new Argument("rt_queue_size", "7"),
        new Argument("search_listener", "some_unchecked_string"),
        new Argument("search_mode", "realtime"),
        new Argument("spawn_process", 0),
        new Argument("status_buckets", "10"),
        new Argument("sync_bundle_replication", 1),
        new Argument("time_format", "some_unchecked_string"),
        new Argument("timeout", "11")
    }, args);
}
// NOTE(review): commented-out legacy members (badOutputMode,
// badSearchMode, badTruncationMode, badExecutionMode, the Run overloads,
// and the string-based RunWait overload) were removed here; recover them
// from source control if needed.

/// <summary>
/// Run the given query with the given query args and wait for the job to
/// complete.
/// </summary>
/// <param name="service">The service</param>
/// <param name="jobArgs">The args</param>
/// <returns>The job</returns>
private Job RunWait(Service service, JobArgs jobArgs)
{
    // Block until creation completes and hand back the resulting job.
    return service.CreateJobAsync(jobArgs).Result;
}
/// <summary>
/// Assigns every JobArgs property, then checks the serialized string form
/// and the Argument sequence the instance produces.
/// </summary>
void CanSetEveryValue()
{
    var args = new JobArgs()
    {
        AutoCancel = 1,
        AutoFinalizeEventCount = 2,
        AutoPause = 3,
        EarliestTime = "some_unchecked_string",
        EnableLookups = false,
        ForceBundleReplication = true,
        Id = "some_unchecked_string",
        IndexEarliest = "some_unchecked_string",
        IndexLatest = "some_unchecked_string",
        LatestTime = "some_unchecked_string",
        MaxCount = 4,
        MaxTime = 5,
        Namespace = "some_unchecked_string",
        Now = "some_unchecked_string",
        RealTimeBlocking = true,
        RealTimeIndexFilter = true,
        RealTimeMaxBlockSeconds = 6,
        RealTimeQueueSize = 7,
        ReduceFrequency = 8,
        ReloadMacros = false,
        RemoteServerList = "some_unchecked_string",
        RequiredFieldList = new List<string>() { "some_unchecked_string", "some_other_uncheck_string" },
        ReuseMaxSecondsAgo = 9,
        SearchListener = "some_unchecked_string",
        SearchMode = SearchMode.RealTime,
        SpawnProcess = false,
        StatusBuckets = 10,
        SyncBundleReplication = true,
        TimeFormat = "some_unchecked_string",
        Timeout = 11
    };

    // Booleans serialize as 0/1; the RequiredFieldList expands to one
    // "rf" entry per field.
    Assert.Equal(
        "auto_cancel=1; " +
        "auto_finalize_ec=2; " +
        "auto_pause=3; " +
        "earliest_time=some_unchecked_string; " +
        "enable_lookups=0; " +
        "force_bundle_replication=1; " +
        "id=some_unchecked_string; " +
        "index_earliest=some_unchecked_string; " +
        "index_latest=some_unchecked_string; " +
        "latest_time=some_unchecked_string; " +
        "max_count=4; " +
        "max_time=5; " +
        "namespace=some_unchecked_string; " +
        "now=some_unchecked_string; " +
        "reduce_freq=8; " +
        "reload_macros=0; " +
        "remote_server_list=some_unchecked_string; " +
        "reuse_max_seconds_ago=9; " +
        "rf=some_unchecked_string; " +
        "rf=some_other_uncheck_string; " +
        "rt_blocking=1; " +
        "rt_indexfilter=1; " +
        "rt_maxblocksecs=6; " +
        "rt_queue_size=7; " +
        "search_listener=some_unchecked_string; " +
        "search_mode=realtime; " +
        "spawn_process=0; " +
        "status_buckets=10; " +
        "sync_bundle_replication=1; " +
        "time_format=some_unchecked_string; " +
        "timeout=11",
        args.ToString());

    // The same values expressed as the generated Argument sequence.
    Assert.Equal(new List<Argument>()
    {
        new Argument("auto_cancel", "1"),
        new Argument("auto_finalize_ec", "2"),
        new Argument("auto_pause", "3"),
        new Argument("earliest_time", "some_unchecked_string"),
        new Argument("enable_lookups", 0),
        new Argument("force_bundle_replication", 1),
        new Argument("id", "some_unchecked_string"),
        new Argument("index_earliest", "some_unchecked_string"),
        new Argument("index_latest", "some_unchecked_string"),
        new Argument("latest_time", "some_unchecked_string"),
        new Argument("max_count", "4"),
        new Argument("max_time", "5"),
        new Argument("namespace", "some_unchecked_string"),
        new Argument("now", "some_unchecked_string"),
        new Argument("reduce_freq", "8"),
        new Argument("reload_macros", 0),
        new Argument("remote_server_list", "some_unchecked_string"),
        new Argument("reuse_max_seconds_ago", "9"),
        new Argument("rf", "some_unchecked_string"),
        new Argument("rf", "some_other_uncheck_string"),
        new Argument("rt_blocking", 1),
        new Argument("rt_indexfilter", 1),
        new Argument("rt_maxblocksecs", "6"),
        new Argument("rt_queue_size", "7"),
        new Argument("search_listener", "some_unchecked_string"),
        new Argument("search_mode", "realtime"),
        new Argument("spawn_process", 0),
        new Argument("status_buckets", "10"),
        new Argument("sync_bundle_replication", 1),
        new Argument("time_format", "some_unchecked_string"),
        new Argument("timeout", "11")
    }, args);
}
/// <summary>
/// The main program
/// </summary>
/// <param name="argv">The command line arguments</param>
public static void Main(string[] argv)
{
    Command cli = Command.Splunk("search_realtime");
    cli.AddRule("search", typeof(string), "search string");
    cli.Parse(argv);

    if (!cli.Opts.ContainsKey("search"))
    {
        System.Console.WriteLine(
            "Search query string required, use --search=\"query\"");
        Environment.Exit(1);
    }

    var service = Service.Connect(cli.Opts);

    // Realtime window is 5 minutes
    var queryArgs = new JobArgs
    {
        SearchMode = JobArgs.SearchModeEnum.Realtime,
        EarliestTime = "rt-5m",
        LatestTime = "rt",
    };

    var job = service.GetJobs().Create((string)cli.Opts["search"], queryArgs);

    // Ask for XML previews; Count = 0 returns all entries.
    var outputArgs = new JobResultsPreviewArgs
    {
        OutputMode = JobResultsPreviewArgs.OutputModeEnum.Xml,
        Count = 0
    };

    // Print five half-second-apart snapshots of the realtime results.
    for (var i = 0; i < 5; i++)
    {
        System.Console.WriteLine();
        System.Console.WriteLine();
        System.Console.WriteLine("Snapshot " + i + ":");

        using (var stream = job.ResultsPreview(outputArgs))
        using (var reader = new ResultsReaderXml(stream))
        {
            foreach (var @event in reader)
            {
                System.Console.WriteLine("EVENT:");

                foreach (string key in @event.Keys)
                {
                    System.Console.WriteLine(" " + key + " -> " + @event[key]);
                }
            }
        }

        Thread.Sleep(500);
    }

    job.Cancel();
}
/// <summary>
/// DoWork-style handler: unpacks the JobArgs payload from the work event
/// and runs the job on its connection.
/// </summary>
/// <param name="sender">The event sender (unused).</param>
/// <param name="e">The work-event arguments carrying a JobArgs payload.</param>
private void ExecuteJobAsyncInternal(object sender, DoWorkEventArgs e)
{
    var jobArgs = (JobArgs)e.Argument;
    ExecuteJob(jobArgs.Connection, jobArgs.Job);
}
/// <summary>
/// Builds the bulk-upload job (download, insert, process and archive tasks)
/// for the requested <see cref="Models.JobType"/>.
/// </summary>
/// <param name="args">
/// Job configuration: job type, source file name, whether the file came from a
/// browser upload, the triggering user, and (for announcements) the first
/// announced date.
/// </param>
/// <returns>The configured job; never null.</returns>
/// <exception cref="InvalidOperationException">
/// Thrown when <paramref name="args"/> carries an unsupported job type.
/// </exception>
public static IJob ConfigureJob(JobArgs args)
{
    IJob job = null;

    switch (args.JobType)
    {
        case Models.JobType.Announceemnt:
            {
                // Truncate to the date component. The previous implementation
                // round-tripped through a "MM/dd/yyyy" string and crashed with
                // a FormatException whenever FirstAnnouncedDate was null.
                DateTime? firstAnnouncedDate = args.FirstAnnouncedDate.HasValue
                    ? args.FirstAnnouncedDate.Value.Date
                    : (DateTime?)null;

                var tasks = new List<ITask>();

                // FTP download/archive steps apply only to server-side
                // (non-browser) uploads.
                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new AnnouncementFTPDownloadTask());
                }

                tasks.Add(new AnnouncementBulkInsertTask(args.FileName));
                tasks.Add(new ProcessAnnouncementTask(firstAnnouncedDate));

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new FTPAnnouncementArchiveTask());
                }

                job = new DeluxeOM.Announcement.BulkUploader.AnnouncementUploaderJob(tasks);
                break;
            }

        case Models.JobType.PipelineOrder:
            {
                var tasks = new List<ITask>();

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new PipeLineOrderFTPDownloadTask());
                }

                tasks.Add(new PipeLineOrderBulkInsertTask(args.FileName));
                tasks.Add(new ProcessPipeLineOrderTask());

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new FTPPipelineOrderArchiveTask());
                }

                job = new DeluxeOM.PO.BulkUploader.PipeLineOrderUploaderJob(tasks);
                break;
            }

        case Models.JobType.PriceReport:
            {
                var tasks = new List<ITask>();

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new PriceReportFTPDownloadTask());
                }

                tasks.Add(new PriceReportBulkInsertTask(args.FileName));
                tasks.Add(new ProcessPriceReportTask());

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new FTPPriceReportArchiveTask());
                }

                job = new DeluxeOM.PR.BulkUploader.PriceReportUploaderJob(tasks);
                break;
            }

        case Models.JobType.QCReport:
            {
                var tasks = new List<ITask>();

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new QCReportFTPDownloadTask());
                }

                tasks.Add(new QCReportBulkInsertTask(args.FileName));
                tasks.Add(new ProcessQCReportTask());

                if (!args.IsBrowserUpload)
                {
                    tasks.Add(new FTPQCReportArchiveTask());
                }

                job = new DeluxeOM.QC.BulkUploader.QCReportUploaderJob(tasks);
                break;
            }

        case Models.JobType.TitleReport:
            {
                var tasks = new List<ITask>() { new ProcessTitleTask() };
                job = new DeluxeOM.Title.BulkUploader.ProcessTitleJob(tasks);
                break;
            }

        default:
            // Fail loudly instead of the NullReferenceException the caller
            // would otherwise hit on job.RunBy below.
            throw new InvalidOperationException(
                "Unsupported job type: " + args.JobType);
    }

    // System-triggered jobs default to "SYSTEM"; user names arrive with
    // underscores in place of spaces.
    job.RunBy = string.IsNullOrEmpty(args.TriggeredBy)
        ? "SYSTEM"
        : args.TriggeredBy.Replace('_', ' ');

    return job;
}
/// <summary>
/// Asynchronously updates the specification of a search <see cref="Job"/>.
/// </summary>
/// <param name="searchId">
/// ID of a search <see cref="Job"/>.
/// </param>
/// <param name="args">
/// New specification of the search job.
/// </param>
/// <remarks>
/// This method uses the <a href="http://goo.gl/8HjDNS">POST
/// search/jobs/{search_id}</a> to update the <see cref="JobArgs"/> for
/// search <see cref="Job"/> identified by <see cref="searchId"/>.
/// </remarks>
public async Task UpdateJobArgsAsync(string searchId, JobArgs args)
{
    var jobResource = new ResourceName(JobCollection.ClassResourceName, searchId);

    using (var postResponse = await this.Context.PostAsync(this.Namespace, jobResource, args))
    {
        // Anything other than 200 OK surfaces as an exception.
        await postResponse.EnsureStatusCodeAsync(HttpStatusCode.OK);
    }
}
/// <summary>
/// Parses one crontab-style line into a <see cref="JobArgs"/>. Blank lines,
/// '#' comments and malformed definitions yield null. Supports the
/// @service/@reboot flag keywords and the @hourly..@yearly shortcuts.
/// </summary>
/// <param name="line">The raw crontab line; may be null.</param>
/// <returns>The parsed job, or null when the line defines no valid job.</returns>
public static JobArgs DoParseJobLine(string line)
{
    var definition = (line ?? "").Trim().Replace("\t", " ");

    // Empty lines and comments define nothing.
    if (string.IsNullOrWhiteSpace(definition) || definition[0] == '#')
    {
        return null;
    }

    var job = new JobArgs();

    if (definition[0] == '@')
    {
        var keyword = StringParser.ExtractWord(ref definition).ToLower();

        // @service and @reboot carry no schedule; the remainder of the
        // line is the command, and parsing ends here.
        if (keyword == "@service")
        {
            job.Service = true;
            job.Command = definition;
            return job;
        }

        if (keyword == "@reboot")
        {
            job.Reboot = true;
            job.Command = definition;
            return job;
        }

        // The remaining shortcuts expand into an explicit five-field schedule
        // that is parsed by the common path below.
        string schedule;

        switch (keyword)
        {
            case "@yearly":
            case "@annually":
                schedule = "0 0 1 1 * ";
                break;

            case "@monthly":
                schedule = "0 0 1 * * ";
                break;

            case "@weekly":
                schedule = "0 0 * * 0 ";
                break;

            case "@daily":
                schedule = "0 0 * * * ";
                break;

            case "@hourly":
                schedule = "0 * * * * ";
                break;

            default:
                // Unknown @-keyword: not a valid job line.
                return null;
        }

        definition = schedule + definition;
    }

    try
    {
        // Five schedule fields followed by the command to run.
        job.Minute = StringParser.ExtractWord(ref definition);
        job.Hour = StringParser.ExtractWord(ref definition);
        job.Day = StringParser.ExtractWord(ref definition);
        job.Month = StringParser.ExtractWord(ref definition);
        job.Weekday = StringParser.ExtractWord(ref definition);
        job.Command = definition;
        return job;
    }
    catch (Exception)
    {
        // Too few fields (or any extraction failure) means the line is malformed.
        return null;
    }
}
///// <summary>
///// Invalid argument
///// </summary>
//private readonly Args<SearchOption> badOutputMode =
//    new Args("output_mode", "invalid_arg_value");

///// <summary>
///// Invalid argument
///// </summary>
//private readonly Args<SearchMode> badSearchMode =
//    new Args("search_mode", "invalid_arg_value");

///// <summary>
///// Invalid argument
///// </summary>
//private readonly Args badTruncationMode =
//    new Args("truncation_mode", "invalid_arg_value");

///// <summary>
///// Invalid argument
///// </summary>
//private readonly Args badExecutionMode =
//    new Args("exec_mode", "invalid_arg_value");

///// <summary>
///// Run the given query.
///// </summary>
///// <param name="service">The service</param>
///// <param name="query">The search query</param>
///// <returns>The job</returns>
//private Job Run(Service service, string query)
//{
//    return this.Run(service, query, null);
//}

///// <summary>
///// Run the given query with the given query args.
///// </summary>
///// <param name="service">The service</param>
///// <param name="query">The search query</param>
///// <param name="args">The args</param>
///// <returns>The job</returns>
//private Job Run(Service service, string query, JobArgs args)
//{
//    args.Search = query;
//    return service.StartJob(args);
//}

///// <summary>
///// Run the given query and wait for the job to complete.
///// </summary>
///// <param name="service">The service</param>
///// <param name="query">The search query</param>
///// <returns>The job</returns>
//private Job RunWait(Service service, string query)
//{
//    return this.RunWait(service, query, null);
//}

/// <summary>
/// Run the given query with the given query args and wait for the job to
/// complete.
/// </summary>
/// <param name="service">The service</param>
/// <param name="jobArgs">The args</param>
/// <returns>The job</returns>
// NOTE(review): blocks on .Result, which is acceptable in this synchronous
// test helper, but failures surface as AggregateException rather than the
// underlying exception type.
private Job RunWait(Service service, JobArgs jobArgs)
{
    return service.CreateJobAsync(jobArgs).Result;
}
/// <summary>
/// Run the given query with the given query args.
/// </summary>
/// <param name="service">The service</param>
/// <param name="query">The search query</param>
/// <param name="args">The args</param>
/// <returns>The job</returns>
private Job Run(Service service, string query, JobArgs args)
{
    var jobCollection = service.GetJobs();
    return jobCollection.Create(query, args);
}
/// <summary>
/// Verifies that creating a job from a syntactically invalid search string
/// fails with an HTTP 400 Bad Request error; any other failure is rethrown.
/// </summary>
public void BadOutputMode()
{
    var service = Connect();

    // Prepending garbage to the query makes the search string invalid.
    JobArgs jobArgs = new JobArgs(
        "invalidpart" + Query);

    //jobArgs.outuputMode = badOutputMode;

    Job job = null;

    try
    {
        job = service.CreateJobAsync(jobArgs).Result;
    }
    catch (Exception e)
    {
        // .Result wraps failures in AggregateException; fall back to the
        // outer message when there is no inner exception. Previously this
        // dereferenced e.InnerException unconditionally, so a non-wrapped
        // exception produced a NullReferenceException that masked the
        // real failure.
        var message = (e.InnerException ?? e).Message;

        if (!message.ToLower().Contains("400: bad request"))
        {
            throw;
        }
    }
    finally
    {
        // Clean up the job if creation unexpectedly succeeded.
        if (job != null)
        {
            job.CancelAsync().Wait();
        }
    }
}
/// <summary>
/// Asynchronously starts a search job for the given search string.
/// </summary>
/// <param name="search">The search string; must not be null.</param>
/// <param name="count">Maximum number of results; presumably 0 means unlimited — TODO confirm against the implementing type.</param>
/// <param name="mode">Execution mode for the job.</param>
/// <param name="args">Optional job arguments.</param>
/// <param name="customArgs">Optional custom job arguments.</param>
/// <returns>
/// Always default (null) here: this looks like a Code Contracts
/// contract-class stub that only declares the precondition; the real
/// implementation lives in the contracted type — confirm before calling
/// this member directly.
/// </returns>
public Task<Job> SearchAsync(string search, int count = 0, ExecutionMode mode = ExecutionMode.Normal, JobArgs args = null, CustomJobArgs customArgs = null)
{
    // Precondition only; the contract rewriter applies it to implementers.
    Contract.Requires<ArgumentNullException>(search != null);
    return default(Task<Job>);
}
/// <summary>
/// Runs a realtime search and progressively displays preview results,
/// page by page, until the background result-collection task finishes.
/// </summary>
/// <param name="searchStr">The search string to execute.</param>
// NOTE(review): async void means exceptions here cannot be observed by
// callers — confirm this is only invoked as a UI event handler.
private async void DisplaySearchPreviewResult(string searchStr)
{
    int maxResultCount = 10000;

    // Realtime job bounded by the page's configured time window.
    JobArgs args = new JobArgs();
    args.EarliestTime = this.searchEarliestTime;
    args.LatestTime = this.searchLatestTime;
    args.SearchMode = SearchMode.RealTime;

    Job realtimeJob = await MainPage.SplunkService.Jobs.CreateAsync(searchStr, count: maxResultCount, args: args);

    Stopwatch watch = new Stopwatch();
    watch.Start();
    this.allResults.Clear();

    // Poll previews until at least one result arrives, 10 seconds elapse,
    // or the user cancels the search.
    int resultCount = 0;
    do
    {
        using (SearchResultStream resultStream = await realtimeJob.GetSearchPreviewAsync())
        {
            resultCount = resultStream.Count();
        }
    }
    while (resultCount == 0 && watch.Elapsed.TotalSeconds <= 10 && !this.cancelSearchTokenSource.Token.IsCancellationRequested);

    bool showFirstPage = false;

    SearchResultArgs searchArgs = new SearchResultArgs();
    searchArgs.Count = maxResultCount;

    using (SearchResultStream resultStream = await realtimeJob.GetSearchPreviewAsync(searchArgs))
    {
        titleGrid.Visibility = Visibility.Visible;

        Task task = this.GetResultTask(resultStream); //start a task to get all results

        // While results stream in, show the first page as soon as it can be
        // rendered and keep the paging links in sync; re-check once a second
        // until the collection task ends (completed, faulted or canceled).
        do
        {
            if (this.allResults.Count > 0)
            {
                if (!showFirstPage && this.ShowResultPage(this.allResults, 0, this.itemsPerPage))
                {
                    showFirstPage = true;
                }

                if (this.currentShownPageIndex < 0)
                {
                    ShowPagingLink(0);
                }
            }

            await Task.Delay(1000);
        }
        while (!(task.Status == TaskStatus.RanToCompletion || task.Status == TaskStatus.Faulted || task.Status == TaskStatus.Canceled));

        // Final refresh in case nothing was shown while streaming.
        if (!showFirstPage)
        {
            this.ShowResultPage(this.allResults, 0, this.itemsPerPage);
        }

        if (this.currentShownPageIndex < 0)
        {
            ShowPagingLink(0);
        }

        this.PageContentReset();

        // Realtime jobs never finish on their own; cancel explicitly.
        await realtimeJob.CancelAsync();
    }
}
/// <summary>
/// Runs a realtime search and progressively displays preview results,
/// page by page, until the background result-collection task finishes.
/// </summary>
/// <param name="searchStr">The search string to execute.</param>
// NOTE(review): this appears to duplicate an identical method elsewhere in
// the codebase — consider consolidating. async void also means exceptions
// here cannot be observed by callers; confirm it is only a UI event handler.
private async void DisplaySearchPreviewResult(string searchStr)
{
    int maxResultCount = 10000;

    // Realtime job bounded by the page's configured time window.
    JobArgs args = new JobArgs();
    args.EarliestTime = this.searchEarliestTime;
    args.LatestTime = this.searchLatestTime;
    args.SearchMode = SearchMode.RealTime;

    Job realtimeJob = await MainPage.SplunkService.Jobs.CreateAsync(searchStr, count : maxResultCount, args : args);

    Stopwatch watch = new Stopwatch();
    watch.Start();
    this.allResults.Clear();

    // Poll previews until at least one result arrives, 10 seconds elapse,
    // or the user cancels the search.
    int resultCount = 0;
    do
    {
        using (SearchResultStream resultStream = await realtimeJob.GetSearchPreviewAsync())
        {
            resultCount = resultStream.Count();
        }
    }
    while (resultCount == 0 && watch.Elapsed.TotalSeconds <= 10 && !this.cancelSearchTokenSource.Token.IsCancellationRequested);

    bool showFirstPage = false;

    SearchResultArgs searchArgs = new SearchResultArgs();
    searchArgs.Count = maxResultCount;

    using (SearchResultStream resultStream = await realtimeJob.GetSearchPreviewAsync(searchArgs))
    {
        titleGrid.Visibility = Visibility.Visible;

        Task task = this.GetResultTask(resultStream); //start a task to get all results

        // While results stream in, show the first page as soon as it can be
        // rendered and keep the paging links in sync; re-check once a second
        // until the collection task ends (completed, faulted or canceled).
        do
        {
            if (this.allResults.Count > 0)
            {
                if (!showFirstPage && this.ShowResultPage(this.allResults, 0, this.itemsPerPage))
                {
                    showFirstPage = true;
                }

                if (this.currentShownPageIndex < 0)
                {
                    ShowPagingLink(0);
                }
            }

            await Task.Delay(1000);
        }
        while (!(task.Status == TaskStatus.RanToCompletion || task.Status == TaskStatus.Faulted || task.Status == TaskStatus.Canceled));

        // Final refresh in case nothing was shown while streaming.
        if (!showFirstPage)
        {
            this.ShowResultPage(this.allResults, 0, this.itemsPerPage);
        }

        if (this.currentShownPageIndex < 0)
        {
            ShowPagingLink(0);
        }

        this.PageContentReset();

        // Realtime jobs never finish on their own; cancel explicitly.
        await realtimeJob.CancelAsync();
    }
}
/// <summary>
/// Measures how far Job.TransitionAsync overshoots its requested delay when
/// waiting on a realtime job (which never reaches Done), and asserts the
/// overhead stays within an empirically derived range.
/// </summary>
public async Task JobTransitionDelay()
{
    using (var service = await SdkHelper.CreateService())
    {
        //// Reference: [Algorithms for calculating variance](https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm)

        // A realtime search never completes, so every TransitionAsync(Done, delay)
        // below is expected to time out.
        var args = new JobArgs
        {
            SearchMode = SearchMode.RealTime
        };

        // Online (Welford-style) accumulators for min/max/mean/variance.
        var min = double.PositiveInfinity;
        var max = double.NegativeInfinity;
        var n = 0;
        var mean = 0.0;
        var variance = 0.0;

        var sampleSize = 1; // increase to compute statistics used to derive Assert.InRange values

        for (n = 1; n <= sampleSize; ++n)
        {
            Job job = await service.Jobs.CreateAsync("search index=_internal", args : args);

            DateTime start = DateTime.Now;
            int totalDelay = 0;

            // Try increasing delays; each timeout surfaces as TaskCanceledException.
            for (int delay = 1000; delay < 5000; delay += 1000)
            {
                try
                {
                    await job.TransitionAsync(DispatchState.Done, delay);
                    break;
                }
                catch (TaskCanceledException)
                { }
                totalDelay += delay;
            }

            var duration = DateTime.Now - start;

            try
            {
                await job.CancelAsync();
            }
            catch (TaskCanceledException)
            { }

            // x is the measured overhead beyond the requested delays.
            var x = duration.TotalMilliseconds - totalDelay;

            if (x < min)
            {
                min = x;
            }
            if (x > max)
            {
                max = x;
            }

            var delta = x - mean;
            mean += delta / n;
            variance += delta * (x - mean);

            // Statistically derived by repeated tests with sampleSize = 100; no failures in a test with sampleSize = 10,000
            // This range is outside three standard deviations. Adjust as required to support your test environment.
            Assert.InRange(x, 1000, 5000);
        }

        double sd;

        // Sample variance needs at least two observations.
        if (--n < 2)
        {
            sd = variance = double.NaN;
        }
        else
        {
            variance /= n - 1;
            sd = Math.Sqrt(variance);
        }

        Console.WriteLine("\n Mean: {0}\n SD: {1}\n Range: [{3}, {4}]\n N: {2}", mean, sd, min, max, n); // swallowed by Xunit version [1.0, 2.0)
    }
}
/// <summary>
/// Asynchronously starts a new search <see cref="Job"/>.
/// </summary>
/// <param name="args">
/// Specification of a search job.
/// </param>
/// <returns>
/// An object representing the search job that was started.
/// </returns>
/// <remarks>
/// This method uses the <a href="http://goo.gl/JZcPEb">POST
/// search/jobs</a> endpoint to start a new search <see cref="Job"/> as
/// specified by <see cref="args"/>. Oneshot searches return results rather
/// than a job and are rejected by precondition.
/// </remarks>
public async Task<Job> CreateJobAsync(JobArgs args)
{
    Contract.Requires<ArgumentNullException>(args != null, "args");
    Contract.Requires<ArgumentNullException>(args.Search != null, "args.Search");
    Contract.Requires<ArgumentException>(args.ExecutionMode != ExecutionMode.Oneshot, "args.ExecutionMode: ExecutionMode.Oneshot");

    // NOTE(review): export searches also return no job, but JobArgs cannot
    // express an export, so no additional check is required here.

    string sid;

    using (var response = await this.Context.PostAsync(this.Namespace, JobCollection.ClassResourceName, args))
    {
        // A successful creation answers 201 Created and carries the search id.
        await response.EnsureStatusCodeAsync(HttpStatusCode.Created);
        sid = await response.XmlReader.ReadResponseElementAsync("sid");
    }

    // Fetch the job's initial state before handing it to the caller.
    var createdJob = new Job(this.Context, this.Namespace, name: sid);
    await createdJob.GetAsync();

    return createdJob;
}
/// <summary>
/// Verifies that fetching search results is not capped at the default count
/// of 100: a search producing 101 events must report all 101 results.
/// </summary>
public async Task JobResultsDefaultsToAll()
{
    using (var service = await SdkHelper.CreateService())
    {
        var defaultArgs = new JobArgs();
        var searchJob = await service.Jobs.CreateAsync("search index=_* | head 101", args: defaultArgs);

        using (SearchResultStream resultStream = await searchJob.GetSearchResultsAsync())
        {
            // 101 exceeds the default page size of 100, proving no cap was applied.
            Assert.Equal(101, searchJob.ResultCount);
        }

        await searchJob.CancelAsync();
    }
}
/// <summary>
/// Creates a oneshot search <see cref="Job"/> and returns its results.
/// </summary>
/// <param name="args">
/// Specification of the search; its execution mode is forced to oneshot.
/// </param>
/// <returns>The search results; the caller owns and should dispose them.</returns>
/// <remarks>
/// See the <a href="http://goo.gl/b02g1d">POST search/jobs</a> REST API Reference.
/// </remarks>
public async Task<SearchResults> SearchOneshotAsync(JobArgs args)
{
    Contract.Requires<ArgumentNullException>(args != null, "args");

    // Oneshot searches return their results directly instead of a job id.
    args.ExecutionMode = ExecutionMode.Oneshot;

    Response response = await this.Context.PostAsync(this.Namespace, JobCollection.ClassResourceName, args);

    try
    {
        await response.EnsureStatusCodeAsync(HttpStatusCode.OK);

        // Ownership of the response transfers to the result set on success.
        return await SearchResults.CreateAsync(response, leaveOpen: false); // Transfers response ownership
    }
    catch
    {
        // On any failure the response must be disposed here before rethrowing.
        response.Dispose();
        throw;
    }
}
/// <summary>
/// Measures how far Job.TransitionAsync overshoots its requested delay when
/// waiting on a realtime job (which never reaches Done), and asserts the
/// overhead stays within an empirically derived range.
/// </summary>
public async Task JobTransitionDelay()
{
    using (var service = await SdkHelper.CreateService())
    {
        //// Reference: [Algorithms for calculating variance](https://en.wikipedia.org/wiki/Algorithms_for_calculating_variance#Online_algorithm)

        // A realtime search never completes, so every TransitionAsync(Done, delay)
        // below is expected to time out.
        var args = new JobArgs
        {
            SearchMode = SearchMode.RealTime
        };

        // Online (Welford-style) accumulators for min/max/mean/variance.
        var min = double.PositiveInfinity;
        var max = double.NegativeInfinity;
        var n = 0;
        var mean = 0.0;
        var variance = 0.0;

        var sampleSize = 1; // increase to compute statistics used to derive Assert.InRange values

        for (n = 1; n <= sampleSize; ++n)
        {
            Job job = await service.Jobs.CreateAsync("search index=_internal", args: args);

            DateTime start = DateTime.Now;
            int totalDelay = 0;

            // Try increasing delays; each timeout surfaces as TaskCanceledException.
            for (int delay = 1000; delay < 5000; delay += 1000)
            {
                try
                {
                    await job.TransitionAsync(DispatchState.Done, delay);
                    break;
                }
                catch (TaskCanceledException)
                { }
                totalDelay += delay;
            }

            var duration = DateTime.Now - start;

            try
            {
                await job.CancelAsync();
            }
            catch (TaskCanceledException)
            { }

            // x is the measured overhead beyond the requested delays.
            var x = duration.TotalMilliseconds - totalDelay;

            if (x < min) min = x;
            if (x > max) max = x;

            var delta = x - mean;
            mean += delta / n;
            variance += delta * (x - mean);

            // Statistically derived by repeated tests with sampleSize = 100; no failures in a test with sampleSize = 10,000
            // This range is outside three standard deviations. Adjust as required to support your test environment.
            Assert.InRange(x, 1000, 4000);
        }

        double sd;

        // Sample variance needs at least two observations.
        if (--n < 2)
        {
            sd = variance = double.NaN;
        }
        else
        {
            variance /= n - 1;
            sd = Math.Sqrt(variance);
        }

        Console.WriteLine("\n Mean: {0}\n SD: {1}\n Range: [{3}, {4}]\n N: {2}", mean, sd, min, max, n); // swallowed by Xunit version [1.0, 2.0)
    }
}