// Constructor for the transparent full-screen rectangle-capture form.
// Sets up drawing pens, an off-screen surface covering the whole virtual screen,
// event handlers, the crosshair cursor, and a ~100 fps repaint timer.
public RectangleTransparent()
{
    // Alpha = 1 (nearly transparent) fill; presumably keeps the layered window
    // receiving mouse input while appearing see-through — TODO confirm.
    clearPen = new Pen(Color.FromArgb(1, 0, 0, 0));
    // Solid black pen plus a white dashed pen drawn over it produce a
    // "marching ants" style selection border visible on any background.
    borderDotPen = new Pen(Color.Black, 1);
    borderDotPen2 = new Pen(Color.White, 1);
    borderDotPen2.DashPattern = new float[] { 5, 5 };
    // Drives the dash-offset animation of the selection border.
    penTimer = Stopwatch.StartNew();
    ScreenRectangle = CaptureHelpers.GetScreenBounds();
    // Off-screen surface the selection rectangle is composed onto.
    surface = new Bitmap(ScreenRectangle.Width, ScreenRectangle.Height);
    gSurface = Graphics.FromImage(surface);
    // Favor speed over quality: this surface is redrawn on every timer tick.
    gSurface.InterpolationMode = InterpolationMode.NearestNeighbor;
    gSurface.SmoothingMode = SmoothingMode.HighSpeed;
    gSurface.CompositingMode = CompositingMode.SourceCopy;
    gSurface.CompositingQuality = CompositingQuality.HighSpeed;
    gSurface.Clear(Color.FromArgb(1, 0, 0, 0));
    // Manually position the form so it exactly covers the virtual screen.
    StartPosition = FormStartPosition.Manual;
    Bounds = ScreenRectangle;
    Text = "ShareX - " + Resources.RectangleTransparent_RectangleTransparent_Rectangle_capture_transparent;
    Shown += RectangleLight_Shown;
    KeyUp += RectangleLight_KeyUp;
    MouseDown += RectangleLight_MouseDown;
    MouseUp += RectangleLight_MouseUp;
    // Cursor(Stream) copies the data, so the stream can be disposed immediately.
    using (MemoryStream cursorStream = new MemoryStream(Resources.Crosshair))
    {
        Cursor = new Cursor(cursorStream);
    }
    // 10 ms tick ≈ 100 redraws per second while the capture form is open.
    timer = new Timer { Interval = 10 };
    timer.Tick += timer_Tick;
    timer.Start();
}
// Begins measuring request duration when an HTTP request enters the pipeline.
// A Task produces a lazily-stopped stopwatch that is stored in per-request
// storage so a matching end-request handler can read the elapsed time later.
private static void OnBeginRequest(object sender, EventArgs e)
{
    var context = (HttpApplication)sender;
    // TaskCreationOptions.PreferFairness should be specified so that StartNew()
    // begins the task immediately. (Translated from the original Korean comment.)
    var stopwatchTask = new Task<Lazy<double>>(() =>
    {
        var sw = new Stopwatch();
        sw.Start();
        if(IsDebugEnabled)
        {
            var request = context.Request;
            log.Debug(BeginRequestLogFormat, request.UserHostAddress, request.RequestType, request.CurrentExecutionFilePath);
        }
        // When the Lazy value is first evaluated, the stopwatch is stopped and the
        // elapsed milliseconds returned. (Translated from the original Korean comment.)
        return new Lazy<double>(() =>
        {
            sw.Stop();
            return sw.ElapsedMilliseconds;
        });
    });
    stopwatchTask.Start();
    // Stashed under a well-known key for the matching end-of-request handler.
    Local.Data[AsyncAccessLogModuleKey] = stopwatchTask;
}
/// <summary>
/// Exports the data table to XLSX format, honoring the selected locale.
/// (Translated from the original Russian documentation.)
/// </summary>
/// <param name="path">Destination file path; ".xlsx" is appended when missing.</param>
/// <param name="localisation">Localisation to export with.</param>
/// <returns>True on success; false when the export failed (the error is logged).</returns>
public override bool Export(String path, Localisation localisation)
{
    try
    {
        // Case-insensitive check avoids appending a second extension to "FILE.XLSX".
        if (!path.EndsWith(".xlsx", StringComparison.OrdinalIgnoreCase))
            path += ".xlsx";
        log.Info(String.Format("Export to .xlsx file to: {0}", path));

        var timer = Stopwatch.StartNew();
        var file = new FileInfo(path);
        using (var pck = new ExcelPackage(file))
        {
            ExcelWorksheet ws = pck.Workbook.Worksheets.Add("Sheet1");
            // Load the whole table including the header row, then size the columns.
            ws.Cells["A1"].LoadFromDataTable(dataTable, true);
            ws.Cells.AutoFitColumns();
            pck.Save();
        }
        timer.Stop();

        // BUG FIX: Elapsed.Milliseconds only yields the 0-999 ms component of the
        // elapsed time; ElapsedMilliseconds is the full duration in milliseconds.
        log.Info(String.Format("Export complete! Elapsed time: {0} ms", timer.ElapsedMilliseconds));
        return true;
    }
    catch (Exception ex)
    {
        log.Error("Can't export to .xlsx file!", ex);
        return false;
    }
}
// Reads a line count, validates each subsequent input line via Validate(), and
// prints VALID/INVALID per line. In DEBUG builds, input is redirected from a
// test file and the total run time is traced.
public static void Main()
{
#if DEBUG
    Console.SetIn(new System.IO.StreamReader(@"../../test.020.in.txt"));
    Debug.Listeners.Add(new ConsoleTraceListener());
#endif
    Stopwatch sw = new Stopwatch();
    sw.Start();

    // Buffer all verdicts and write once at the end to keep console I/O cheap.
    StringBuilder sb = new StringBuilder();
    int lines = int.Parse(Console.ReadLine());
    for (int i = 0; i < lines; i++)
    {
        string line = Console.ReadLine();
        sb.AppendLine(Validate(line) ? "VALID" : "INVALID");
    }

    sw.Stop();
    Console.Write(sb.ToString());
    Debug.WriteLine(sw.Elapsed);
    // Removed: unused local `string bla = "asdlj";` (dead code).
}
/*
Recorded benchmark results:
GNOME cache   -> 00:00:05.6705334 (creating cache)
                 00:00:00.2680794 (once cached)
With no cache -> 00:00:28.8975945 (using Pixbuf)
*/
// Benchmarks thumbnail retrieval for every image in a hard-coded directory,
// falling back to loading the full Pixbuf when no thumbnail is available,
// then prints total time plus hit/miss counts.
static void Main()
{
    string[] files;
    Provider provider = Factory.Provider;
    Pixbuf buf;
    TimeSpan time;
    int nulls = 0; // number of files the provider could not thumbnail

    files = Directory.GetFiles ("/home/jordi/Desktop/berna"); // Directory with images to test
    Stopwatch stopWatch = new Stopwatch ();
    stopWatch.Start ();
    foreach (string file in files)
    {
        buf = provider.GetThumbnail (file, 96, 96);
        if (buf == null)
        {
            // No cached thumbnail: count the miss and load the image directly.
            nulls++;
            buf = new Pixbuf (file);
        }
        buf.Dispose ();
    }
    stopWatch.Stop ();
    time = stopWatch.Elapsed;
    Console.WriteLine ("Time needed {0}. Total files {1}, thumbnailed {2}, skipped {3}",
        time, files.Length, files.Length - nulls, nulls);
    return;
}
// Builds the list of text changes that transform oldText into newText.
// Returns null when computing the diff exceeds maxMilliseconds.
public static IList<TextChange> BuildChangeList(string oldText, string newText, int maxMilliseconds)
{
    List<TextChange> changes = new List<TextChange>();
    var sw = new Stopwatch();
    sw.Start();

    // Simple whitespace/non-whitespace tokenization and comparison
    int oldIndex = 0;
    int newIndex = 0;
    while (true)
    {
        bool thereIsMore = NextChunk(oldText, ref oldIndex, newText, ref newIndex, changes);
        if (!thereIsMore)
            break;
        // Two chunks are consumed per iteration, so the timeout below is only
        // checked every other chunk — presumably to halve timer overhead; confirm.
        thereIsMore = NextChunk(oldText, ref oldIndex, newText, ref newIndex, changes);
        if (!thereIsMore)
            break;
        if (sw.ElapsedMilliseconds > maxMilliseconds)
        {
            return null; // time's up
        }
    }
    return changes;
}
// Console harness: runs Test2() in a loop under a stopwatch until the user
// presses any key other than 'C'. Release builds wrap the test in a catch so a
// failure is logged rather than crashing the harness.
private static void Main(string[] args)
{
    // Keep the benchmark from starving other processes.
    Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.BelowNormal;
    //XTrace.Log = new NetworkLog();
    XTrace.UseConsole();
#if DEBUG
    XTrace.Debug = true;
#endif
    while (true)
    {
        Stopwatch sw = new Stopwatch();
        sw.Start();
#if !DEBUG
        try
        {
#endif
        Test2();
#if !DEBUG
        }
        catch (Exception ex)
        {
            XTrace.WriteException(ex);
        }
#endif
        sw.Stop();
        // "耗时" = "elapsed time" (runtime string, left as-is).
        Console.WriteLine("OK! 耗时 {0}", sw.Elapsed);
        ConsoleKeyInfo key = Console.ReadKey(true);
        if (key.Key != ConsoleKey.C)
            break;
    }
}
/// <summary>
/// Benchmarks <paramref name="a_action"/>: after one untimed warm-up run, the
/// action is executed repeatedly (at raised process priority) until both
/// MEASURE_TIME milliseconds and MIN_REPEATS repetitions have accumulated.
/// </summary>
/// <param name="a_bytes">Number of bytes each invocation of the action processes.</param>
/// <param name="a_action">The work to measure.</param>
/// <returns>Throughput in MB/s, formatted with two decimal places ("F2").</returns>
private string Measure(int a_bytes, Action a_action)
{
    Stopwatch sw = new Stopwatch();
    int repeats = 0;

    // Warm-up run (JIT, caches) — deliberately not timed.
    a_action();

    for (; ; )
    {
        HighPriority();
        try
        {
            sw.Start();
            a_action();
            sw.Stop();
        }
        finally
        {
            // Always restore priority, even if the action throws.
            NormalPriority();
        }

        repeats++;

        // Stop once both the minimum measuring time and repeat count are reached.
        if (sw.ElapsedMilliseconds > MEASURE_TIME && repeats > MIN_REPEATS)
            break;
    }

    double mbs = a_bytes / 1024.0 / 1024.0 * repeats;
    double seconds = sw.ElapsedMilliseconds / 1000.0;
    // FIX: removed the redundant String.Format(...) wrapper around a value that
    // ToString("F2") had already formatted.
    return (mbs / seconds).ToString("F2");
}
// Entry point for a benchmark run: stores the options, optionally lists the
// configured outputs, executes every increment for each iteration, then writes
// results and (optionally) halts for user acknowledgement.
public static void DoStuff(ProgramOptions options)
{
    Options = options;
    GlobalStopwatch = Stopwatch.StartNew();
    DumpSettings(Options);

    // Announce each configured output in its own color unless titles are shown.
    if (!Options.ShowTitles && Options.ShouldLog)
    {
        foreach (var output in Outputs)
            ConsoleExtensions.WriteLineColor(output.OutputColor, output.Name);
    }

    Iterations = Options.NumberOfIterations;
    for (var iteration = 1; iteration <= Iterations; iteration++)
    {
        ConsoleExtensions.WriteLine($"Iteration {iteration}".ToUpper());
        foreach (var increment in Increments)
            RunIncrement(increment);
    }

    GlobalStopwatch.Stop();

    if (Options.ShouldOutput)
        WriteOutputs(DateTime.UtcNow);

    if (Options.ShouldHaltOnEnd)
        ConsoleExtensions.WriteLine("DONE");
    if (Options.ShouldHaltOnEnd)
        Console.ReadLine();
}
/// <summary>
/// Adds a "sessionid" header to outgoing WCF requests and starts per-call trace
/// timing, keyed by the message's GUID MessageId.
/// </summary>
/// <param name="request">The outgoing WCF message; a session-id header is appended.</param>
/// <param name="channel">The client channel the message is sent on.</param>
/// <returns>Always null — no correlation state is passed to the reply handler.</returns>
public object BeforeSendRequest(ref Message request, IClientChannel channel)
{
    // add trace log for debug and performance tuning
    if (null != (request.Headers).MessageId && (request.Headers).MessageId.IsGuid)
    {
        // Start timing the service call; presumably read again when the reply
        // arrives — TODO confirm against AfterReceiveReply.
        ServiceStopWatch = Stopwatch.StartNew();
        Guid messageId;
        (request.Headers).MessageId.TryGetGuid(out messageId);
        CurrentTraceInfo = new TraceInfo()
        {
            // ASP.NET session id when available; empty outside a web context.
            SessionId = (HttpContext.Current != null && HttpContext.Current.Session != null) ? HttpContext.Current.Session.SessionID : "",
            TraceType = TraceType.WcfActionClientCall,
            TraceName = request.Headers.Action,
            TraceUniqueId = messageId.ToString()
        };
        TraceLogger.Instance.TraceServiceStart(CurrentTraceInfo, true);
        // Add a message header with sessionid
        MessageHeader<string> messageHeader = new MessageHeader<string>(CurrentTraceInfo.SessionId);
        MessageHeader untyped = messageHeader.GetUntypedHeader("sessionid", "ns");
        request.Headers.Add(untyped);
    }
    return null;
}
// Dashboard view constructor: wires the andon manager, grids, the 1-second
// application timer, the efficiency stopwatch, and loads/refreshes all shifts.
public DashboardView()
{
    InitializeComponent();

    // BUG FIX: StationList was previously assigned AFTER being passed to
    // AndonManager, so the manager received the old (null) reference and never
    // saw the queue created below. Initialize it first so both share one queue.
    StationList = new Queue<int>();
    AndonManager = new AndonManager(StationList, null, Andonmanager.AndonManager.MODE.MASTER);
    AndonManager.start();

    Plans = new Plans();
    PlanGrid.DataContext = Plans;
    Actuals = new Models.Actuals();
    ActualGrid.DataContext = Actuals;

    // One-shot 1 s timer; presumably re-armed inside AppTimer_Elapsed — confirm.
    AppTimer = new Timer(1000);
    AppTimer.AutoReset = false;
    AppTimer.Elapsed += AppTimer_Elapsed;

    EfficiencyWatch = new Stopwatch();

    // Load every shift and bring its derived state up to date.
    using (PSBContext DBContext = new PSBContext())
    {
        Shifts = DBContext.Shifts.ToList();
        foreach (Shift s in Shifts)
        {
            s.Update();
        }
    }

    AppTimer.Start();
}
// Called when this processor takes ownership of a partition: logs the partition
// and resume offset, and begins timing the checkpoint interval.
Task IEventProcessor.OpenAsync(PartitionContext context)
{
    Console.WriteLine(
        "SimpleEventProcessor initialized. Partition: '{0}', Offset: '{1}'",
        context.Lease.PartitionId,
        context.Lease.Offset);

    // StartNew combines construction and Start in one call.
    this.checkpointStopWatch = Stopwatch.StartNew();

    return Task.FromResult<object>(null);
}
// Integration test: two messages with different SessionIds sent to a sessionful
// subscription must both arrive, and the gap between the first and second
// delivery must be under two seconds (i.e. session switching is fast).
public void when_sending_message_with_session_then_session_receiver_gets_both_messages_fast()
{
    var sender = this.Settings.CreateTopicClient(this.Topic);
    var signal = new AutoResetEvent(false);
    var body1 = Guid.NewGuid().ToString();
    var body2 = Guid.NewGuid().ToString();
    var stopWatch = new Stopwatch();
    var receiver = new SessionSubscriptionReceiver(this.Settings, this.Topic, this.Subscription);

    // Two distinct sessions force the receiver to switch sessions between messages.
    sender.Send(new BrokeredMessage(body1) { SessionId = "foo" });
    sender.Send(new BrokeredMessage(body2) { SessionId = "bar" });

    var received = new ConcurrentBag<string>();
    receiver.Start(
        m =>
        {
            received.Add(m.GetBody<string>());
            signal.Set();
            return MessageReleaseAction.CompleteMessage;
        });

    // Wait for the first delivery, then time how long the second one takes.
    // NOTE(review): if both deliveries signal before the first WaitOne returns,
    // the AutoResetEvent latches only once and the second WaitOne blocks — confirm
    // the receiver delivers strictly one message at a time.
    signal.WaitOne();
    stopWatch.Start();
    signal.WaitOne();
    stopWatch.Stop();

    receiver.Stop();

    Assert.Contains(body1, received);
    Assert.Contains(body2, received);
    Assert.InRange(stopWatch.Elapsed, TimeSpan.Zero, TimeSpan.FromSeconds(2));
}
// Benchmarks three string-reversal implementations (StringBuilder, Array.Reverse,
// XOR swap) on one line of user input, printing each result with its timing.
private static void Main()
{
    var stopwatch = new Stopwatch();

    Console.Write("Enter some string: ");
    var input = Console.ReadLine();

    // Strategy 1: StringBuilder-based reversal.
    stopwatch.Start();
    var viaBuilder = ReverseSB(input);
    stopwatch.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - StringBuilder class", viaBuilder, stopwatch.Elapsed);
    stopwatch.Reset();
    Console.WriteLine();

    // Strategy 2: Array.Reverse on the character array.
    stopwatch.Start();
    var viaArray = ReverseArray(input);
    stopwatch.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - Array.Reverse", viaArray, stopwatch.Elapsed);
    stopwatch.Reset();
    Console.WriteLine();

    // Strategy 3: in-place XOR swapping.
    stopwatch.Start();
    var viaXor = ReverseXor(input);
    stopwatch.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - XOR", viaXor, stopwatch.Elapsed);
    stopwatch.Reset();
}
// Performance test: with 10000 team-2 bunnies spread randomly across 5000 rooms,
// 10000 full team listings must complete in under 100 ms total.
public void PerformanceListByTeam_With10000BunniesRandomlyDistributedIn5000RoomsInSameTeam()
{
    // Arrange: create the rooms, then distribute the bunnies among them.
    const int roomCount = 5000;
    const int bunnyCount = 10000;

    for (int room = 0; room < roomCount; room++)
    {
        this.BunnyWarCollection.AddRoom(room);
    }

    for (int bunny = 0; bunny < bunnyCount; bunny++)
    {
        this.BunnyWarCollection.AddBunny(bunny.ToString(), 2, this.Random.Next(0, roomCount));
    }

    // Act: query the full team listing 10000 times under a stopwatch, checking
    // the count each time.
    var stopwatch = Stopwatch.StartNew();
    for (int attempt = 0; attempt < 10000; attempt++)
    {
        var result = this.BunnyWarCollection.ListBunniesByTeam(2).Count();
        Assert.AreEqual(10000, result, "Incorrect count of bunnies returned by List By Team Command!");
    }
    stopwatch.Stop();

    // Assert: all queries together finish within the 100 ms budget.
    Assert.IsTrue(stopwatch.ElapsedMilliseconds < 100);
}
// Searches upward from 120 for the first four consecutive integers that each
// have exactly four distinct prime factors (cf. Project Euler #47), after first
// filling the shared primes[] table with the first 1000 primes.
static void Main(string[] args)
{
    Stopwatch stopper = Stopwatch.StartNew();

    // Populate primes[0..999] with the first 1000 primes.
    int found = 0;
    int candidate = 2;
    while (found < 1000)
    {
        if (isprime(candidate))
        {
            primes[found] = candidate;
            found++;
        }
        candidate++;
    }

    // Scan for a run of four consecutive numbers, each with 4 distinct factors.
    // All four dpf() calls are made every iteration, matching the original flow.
    int n;
    for (n = 120; true; ++n)
    {
        int a = dpf(n);
        int b = dpf(n + 1);
        int c = dpf(n + 2);
        int d = dpf(n + 3);
        if ((a == b) && (b == c) && (c == d) && (d == 4))
            break;
    }
    stopper.Stop();

    Console.WriteLine("The numers are {0} {1} {2} {3} ", n, n+1, n+2, n+3);
    Console.WriteLine("Elapsed time {0} ms", stopper.ElapsedMilliseconds);
}
// Micro-benchmark: times numberOfCalls invocations of the one-parameter and
// two-parameter null-checking test methods and prints the elapsed milliseconds
// for each.
private static void TestArgumentNullCheckSpeed(int numberOfCalls)
{
    var test1 = new Test1();
    var test2 = new Test2();
    var sut = new NormalNullParameterCheckingTesting();

    // One-parameter overload.
    var timer = Stopwatch.StartNew();
    for (var call = 0; call < numberOfCalls; call++)
    {
        sut.TestWith1(test1);
    }
    Console.WriteLine("Normal null checking speed with 1 Test parameter: " + timer.ElapsedMilliseconds);

    // Two-parameter overload, timed from zero again.
    timer.Restart();
    for (var call = 0; call < numberOfCalls; call++)
    {
        sut.TestWith2(test1, test2);
    }
    Console.WriteLine("Normal null checking speed with 2 Test parameter: " + timer.ElapsedMilliseconds);
}
// Compares two DayOfYear implementations (local lookup array allocated per call
// vs. a shared static array), then demonstrates that a static array's ELEMENTS
// remain mutable even though the reference is fixed — unlike a true const.
public static void Main()
{
    Stopwatch watch = new Stopwatch();
    Random rand = new Random();

    // Time the version that allocates its lookup array locally on every call.
    watch.Start();
    for (int i = 0; i < iterations; i++)
        DayOfYear1(rand.Next(1, 13), rand.Next(1, 29));
    watch.Stop();
    Console.WriteLine("Local array: " + watch.Elapsed);
    watch.Reset();

    // Time the version that reuses a shared static lookup array.
    watch.Start();
    for (int i = 0; i < iterations; i++)
        DayOfYear2(rand.Next(1, 13), rand.Next(1, 29));
    watch.Stop();
    Console.WriteLine("Static array: " + watch.Elapsed);

    // trying to modify static int [] days
    CumulativeDays[0] = 18;
    foreach (int days in daysCumulativeDays)
    {
        Console.Write("{0}, ", days);
    }
    Console.WriteLine("");
    // MY_STR_CONST = "NOT CONST";   (left commented out: consts cannot be reassigned)
}
// CLI entry point: parses command-line options, runs either site-level or
// page-level analysis synchronously, reports any aggregated errors, and prints
// the total elapsed milliseconds.
static void Main(string[] args)
{
    var stopwatch = Stopwatch.StartNew();

    try
    {
        var options = new Options();
        if (CommandLine.Parser.Default.ParseArguments(args, options))
        {
            var processor = new Processor();
            // Block on the async analysis — this is a console app with no UI context.
            if (options.SiteAnalysisMode)
                processor.AnalyzeWebSites(options).Wait();
            else
                processor.AnalyzeWebPages(options).Wait();
        }
    }
    catch (AggregateException ex)
    {
        // .Wait() wraps failures in an AggregateException; report each inner one.
        foreach (var inner in ex.InnerExceptions)
            Console.Error.WriteLine(inner.Message);
    }

    stopwatch.Stop();
    Console.WriteLine(stopwatch.ElapsedMilliseconds);
}
// Builds the LALR parser automaton for the language: creates the parser states,
// propagates transition includes, computes lookaheads, and resolves conflicts.
// The i1..i13 locals checkpoint elapsed milliseconds between phases; they are
// unused by the logic but kept as in-place profiling probes.
public void Build()
{
    _stateHash.Clear();
    Stopwatch sw = new Stopwatch();
    sw.Start();
    var i1 = sw.ElapsedMilliseconds;
    Data = _language.ParserData;
    CheckPrecedenceSettings(_language.GrammarData, Data.ParseMethod);
    var i2 = sw.ElapsedMilliseconds;
    var i3 = sw.ElapsedMilliseconds;
    CreateLalrParserStates();
    var i4 = sw.ElapsedMilliseconds;
    //TODO: move all the following to a single method
    //ComputeTransitionIncludesAndItemLookbacks(); //5 ms
    var i5 = sw.ElapsedMilliseconds;
    PropagateTransitionsIncludes(0); //220 ms
    var i6 = sw.ElapsedMilliseconds;
    //ComputeTransitionsSources(0);
    var i7 = sw.ElapsedMilliseconds;
    ComputeLookaheads();
    var i8 = sw.ElapsedMilliseconds;
    var i9 = sw.ElapsedMilliseconds;
    ComputeAndResolveConflicts();
    var i10 = sw.ElapsedMilliseconds;
    var i11 = sw.ElapsedMilliseconds;
    var i12 = sw.ElapsedMilliseconds;
    // NLALR falls back to non-canonical lookaheads for states still in conflict.
    if (Data.ParseMethod == ParseMethod.Nlalr)
    {
        SwitchConflictingStatesToNonCanonicalLookaheads();
    }
    var i13 = sw.ElapsedMilliseconds;
    ReportAndSetDefaultActionsForConflicts();
    CreateReduceActions();
    ComputeStateExpectedLists();
}
// Builds both a trie and a Dictionary from the same word list, then compares
// lookup timings for the first ten words in each structure.
static void Main(string[] args)
{
    // ===========================================
    // Unzip the archive in the project directory
    // ===========================================
    Stopwatch sw = new Stopwatch();
    TrieNode start = new TrieNode();
    Dictionary<string, int> wordsInDictionary = new Dictionary<string, int>();
    var words = SetInputText();
    PopulateDictionary(sw, words, wordsInDictionary);
    //takes about 9 secs
    PopulateTrie(sw, start, words);
    // Probe the same ten words against both structures so timings are comparable.
    for (int i = 0; i < 10; i++)
    {
        Console.WriteLine("Word: {0}", words[i].ToString());
        SearchInTrie(sw, start, words, words[i].ToString());
        SearchInDictionary(sw, wordsInDictionary, words, words[i].ToString());
    }
}
/// <summary>
/// Starts the nodelet.
/// </summary>
/// <param name="options">The nodelet command options.</param>
public void Start(Options options)
{
    using (var context = ZmqContext.Create())
    using (var client = context.CreateSocket(SocketType.REQ))
    {
        var endpoint = string.Format("tcp://{0}", options.SocketConnection);
        Console.WriteLine("Connecting to: {0}", endpoint);
        client.Connect(endpoint);

        var stopwatch = new Stopwatch();
        foreach (var messageNumber in Enumerable.Range(1, NumberOfMessages))
        {
            Console.WriteLine("Message {0} of {1} sent", messageNumber, NumberOfMessages);

            // Payload is the current UTC tick count; the server echoes it back.
            var payload = DateTime.UtcNow.Ticks.ToString();

            // Time only the request/reply round trip.
            stopwatch.Restart();
            client.Send(payload, Encoding.Unicode);
            var echoed = client.Receive(Encoding.Unicode);
            stopwatch.Stop();

            // 1 when the reply matches the request payload, 0 otherwise.
            var matched = payload == echoed ? 1 : 0;
            Console.WriteLine("{0}: Reply received in {1}ms", matched, stopwatch.ElapsedMilliseconds);
        }
    }
}
/// <summary>
/// Inserts one <c>User</c> row and returns the execution time of the INSERT
/// statement alone (connection setup and parameter binding are excluded).
/// </summary>
/// <param name="user">Entity whose fields are bound as command parameters.</param>
/// <param name="stopwatch">Stopwatch restarted around the ExecuteNonQuery call.</param>
/// <returns>Elapsed time of ExecuteNonQuery in fractional milliseconds.</returns>
public double Insert(User user, Stopwatch stopwatch)
{
    double execTime;
    using (SqlConnection sqlConnection = new SqlConnection(connectionString))
    {
        sqlConnection.Open();
        using (SqlCommand command = new SqlCommand())
        {
            command.Connection = sqlConnection;
            command.CommandType = CommandType.Text;
            command.CommandText = "INSERT INTO [dbo].[User] (Id, FirstName, DateAdded, LastName, Age, DepartmentId) VALUES (@id, @firstN, @dateAdd, @lastN, @ag, @depId)";
            command.Parameters.AddWithValue("@id", user.Id);
            command.Parameters.AddWithValue("@firstN", user.FirstName);
            command.Parameters.AddWithValue("@dateAdd", user.DateAdded);
            command.Parameters.AddWithValue("@lastN", user.LastName);
            command.Parameters.AddWithValue("@ag", user.Age);
            command.Parameters.AddWithValue("@depId", user.DepartmentId);

            // Time only the round trip of the INSERT itself.
            stopwatch.Restart();
            command.ExecuteNonQuery();
            stopwatch.Stop();

            // Equivalent to ElapsedTicks / Stopwatch.Frequency * 1000 (to within
            // TimeSpan's 100 ns resolution), but clearer.
            execTime = stopwatch.Elapsed.TotalMilliseconds;
        }
        // No explicit Close(): disposing the connection (end of using) closes it.
    }
    return execTime;
}
// Nancy module for /agents: logs per-request timing via Before/After hooks and
// serves the agents view on GET /.
public AgentsModule() : base("/agents")
{
    // Start a stopwatch when a request enters this module...
    Before += ctx =>
    {
        _sw = new Stopwatch();
        _sw.Start();
        return null;
    };
    // ...and log method, elapsed time and URL when the request leaves.
    // NOTE(review): _sw is an instance field; this is only safe if Nancy creates
    // a new module instance per request (its default). Confirm the registration
    // does not make this module a singleton, or concurrent requests will race.
    After += ctx =>
    {
        _sw.Stop();
        _logger.Trace("{0} [{1} ms] {2}", ctx.Request.Method, _sw.Elapsed.TotalMilliseconds, ctx.Request.Url.ToString());
    };
    Get["/"] = parameters =>
    {
        try
        {
            return View["Agents", new {Time = DateTime.UtcNow}];
        }
        catch (Exception e)
        {
            // Render failures are logged and surfaced as an empty response.
            _logger.WarnException(e.Message, e);
            return null;
        }
    };
}
// Sets up the screenshot plugin window: loads persisted settings, wires
// screenshot pre/post callbacks that hide and restore the UI, and builds the
// window controls.
public BXSSMainWindow()
{
    _settings = new BXSSSettings();
    _settings.Load();
    _screenshot = new Screenshot(
        KSPUtil.ApplicationRootPath + "PluginData/BXSS/",
        () =>
        {
            // Before capture: remember current visibility, then hide this window
            // (and optionally the whole game UI) so it doesn't appear in the shot.
            _prevUIState = Visible;
            Visible = false;
            if(_mainUIEnabled)
                RenderingManager.ShowUI(false);
        },
        () =>
        {
            // After capture: restore the previous visibility state.
            Visible = _prevUIState;
            if(_mainUIEnabled)
                RenderingManager.ShowUI(true);
        });
    // Initial UI state: collapsed window, main UI on, auto-interval capture off.
    _collapsed = true;
    _mainUIEnabled = true;
    _autoIntervalEnabled = false;
    _autoIntervalStopwatch = new Stopwatch();
    WindowPosition = _settings.WindowPosition;
    Caption = "B.X.S.S";
    SetupControls();
}
// Configures the editable/visible properties of the "cast spell" bot action,
// seeds their default values, and wires change handlers that keep dependent
// properties in sync with user edits.
public CastSpellAction()
{
    _spamControl = new Stopwatch();
    QueueIsRunning = false;
    // MetaProp descriptors drive the property-grid UI for this action.
    Properties["Casted"] = new MetaProp("Casted", typeof(int), new ReadOnlyAttribute(true));
    Properties["SpellName"] = new MetaProp("SpellName", typeof(string), new ReadOnlyAttribute(true));
    Properties["Repeat"] = new MetaProp("Repeat", typeof(DynamicProperty<int>), new TypeConverterAttribute(typeof(DynamicProperty<int>.DynamivExpressionConverter)));
    Properties["Entry"] = new MetaProp("Entry", typeof(uint));
    Properties["CastOnItem"] = new MetaProp("CastOnItem", typeof(bool), new DisplayNameAttribute("Cast on Item"));
    Properties["ItemType"] = new MetaProp("ItemType", typeof(InventoryType), new DisplayNameAttribute("Item Type"));
    Properties["ItemId"] = new MetaProp("ItemId", typeof(uint));
    Properties["RepeatType"] = new MetaProp("RepeatType", typeof(RepeatCalculationType), new DisplayNameAttribute("Repeat Type"));
    // Properties["Recipe"] = new MetaProp("Recipe", typeof(Recipe), new TypeConverterAttribute(typeof(RecipeConverter)));
    // Default values.
    Casted = 0;
    Repeat = new DynamicProperty<int>(this,"1");
    RegisterDynamicProperty("Repeat");
    Entry = 0u;
    RepeatType = RepeatCalculationType.Craftable;
    Recipe = null;
    CastOnItem = false;
    ItemType = InventoryType.Chest;
    ItemId = 0u;
    Properties["SpellName"].Value = SpellName;
    //Properties["Recipe"].Show = false;
    // Item-targeting properties stay hidden until CastOnItem is enabled.
    Properties["ItemType"].Show = false;
    Properties["ItemId"].Show = false;
    // Change handlers keep dependent UI state in sync.
    Properties["Casted"].PropertyChanged += OnCounterChanged;
    CheckTradeskillList();
    Properties["RepeatType"].PropertyChanged += CastSpellActionPropertyChanged;
    Properties["Entry"].PropertyChanged += OnEntryChanged;
    Properties["CastOnItem"].PropertyChanged += CastOnItemChanged;
}
// Compares the runtime of three prime-generation strategies plus a trivial
// modulo-only loop as a baseline, printing elapsed milliseconds for each.
public void ComputeTimesPrimes()
{
    Stopwatch w = new Stopwatch();

    w.Start();
    PrimeNumbers.GeneratePrimeNumbers1(100000);
    Console.WriteLine("Primes 1: " + w.ElapsedMilliseconds.ToString());
    w.Stop();
    w.Reset();

    w.Start();
    PrimeNumbers.GeneratePrimeNumbers2(100000);
    Console.WriteLine("Primes 2: "+ w.ElapsedMilliseconds.ToString());
    w.Stop();
    w.Reset();

    w.Start();
    PrimeNumbers.GeneratePrimeNumbers3(100000);
    Console.WriteLine("Primes 3: " + w.ElapsedMilliseconds.ToString());
    w.Stop();
    // BUG FIX: the Reset() below was missing, so the stopwatch kept Primes 3's
    // accumulated time and "Primes 4" reported the sum of both measurements.
    w.Reset();

    w.Start();
    // Baseline: a loop doing only a modulo, to estimate raw loop overhead.
    for (int i = 1; i <= 100000; i++)
    {
        int mod = i % 2;
    }
    w.Stop();
    Console.WriteLine("Primes 4: " + w.ElapsedMilliseconds.ToString());
}
// Bulk-insert test: inserts 100k rows via InsertAll into a fresh temp-file
// database, then verifies the insert count, that ids were assigned sequentially
// (1-based, in insert order), that the text round-trips, and the row count.
public void InsertALot()
{
    int n = 100000;
    var q = from i in Enumerable.Range(1, n) select new TestObj() { Text = "I am" };
    var objs = q.ToArray();
    var db = new TestDb(Path.GetTempFileName());
    // Tracing off so SQL logging doesn't dominate the timing below.
    db.Trace = false;

    var sw = new Stopwatch();
    sw.Start();
    var numIn = db.InsertAll(objs);
    sw.Stop();
    Assert.AreEqual(numIn, n, "Num inserted must = num objects");

    var inObjs = db.CreateCommand("select * from TestObj").ExecuteQuery<TestObj>().ToArray();
    for (var i = 0; i < inObjs.Length; i++)
    {
        // Autoincrement ids are expected to be 1-based and in insert order, both
        // on the in-memory objects (written back) and the queried rows.
        Assert.AreEqual(i+1, objs[i].Id);
        Assert.AreEqual(i+1, inObjs[i].Id);
        Assert.AreEqual("I am", inObjs[i].Text);
    }

    var numCount = db.CreateCommand("select count(*) from TestObj").ExecuteScalar<int>();
    Assert.AreEqual(numCount, n, "Num counted must = num objects");
}
// Entry point for the template generator: parses command-line arguments, runs
// generation and compilation, writes each compiled template to its output path,
// and reports the total execution time. On failure the error is printed and the
// console waits for a key press.
private static void Main(string[] args)
{
    Stopwatch stopwatch = Stopwatch.StartNew();

    arguments = new Arguments(Environment.CommandLine);
    try
    {
        GenerateAll();
        if (compiler.Templates.Count > 0)
        {
            Console.WriteLine("Running Compiler...");
            compiler.Run();

            Console.WriteLine("Writing Files...");
            foreach (Template template in compiler.Templates)
            {
                using (StreamWriter writer = File.CreateText(template.OutputPath))
                {
                    writer.Write(template.FinalCode);
                }
            }

            Console.WriteLine("Done!");
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine("Error Message: {0}", ex);
        Console.ReadKey();
    }

    stopwatch.Stop();
    Console.WriteLine("Execution Time: " + stopwatch.ElapsedMilliseconds + "ms");
}
///<summary>Select All EHRMeasures from combination of db, static data, and complex calculations.
///Each measure's numerator/denominator is computed for the given date range and
///provider; a null table marks the measure as not computable (-1/-1).</summary>
public static List<EhrMeasure> SelectAllMu2(DateTime dateStart,DateTime dateEnd,long provNum) {
	//When running as a web client, forward the call to the middle tier.
	if(RemotingClient.RemotingRole==RemotingRole.ClientWeb) {
		return Meth.GetObject<List<EhrMeasure>>(MethodBase.GetCurrentMethod(),dateStart,dateEnd,provNum);
	}
	List<EhrMeasure> retVal=GetMU2List();
	Stopwatch s=new Stopwatch();
	for(int i=0;i<retVal.Count;i++) {
		//Time each measure's calculation individually.
		s.Restart();
		retVal[i].Objective=GetObjectiveMu2(retVal[i].MeasureType);
		retVal[i].Measure=GetMeasureMu2(retVal[i].MeasureType);
		retVal[i].PercentThreshold=GetThresholdMu2(retVal[i].MeasureType);
		DataTable table=GetTableMu2(retVal[i].MeasureType,dateStart,dateEnd,provNum);
		if(table==null) {
			//-1 flags a measure whose table could not be computed.
			retVal[i].Numerator=-1;
			retVal[i].Denominator=-1;
		}
		else {
			retVal[i].Numerator=CalcNumerator(table);
			retVal[i].Denominator=table.Rows.Count;
		}
		retVal[i].NumeratorExplain=GetNumeratorExplainMu2(retVal[i].MeasureType);
		retVal[i].DenominatorExplain=GetDenominatorExplainMu2(retVal[i].MeasureType);
		retVal[i].ExclusionExplain=GetExclusionExplainMu2(retVal[i].MeasureType);
		retVal[i].ExclusionCount=GetExclusionCountMu2(retVal[i].MeasureType,dateStart,dateEnd,provNum);
		retVal[i].ExclusionCountDescript=GetExclusionCountDescriptMu2(retVal[i].MeasureType);
		s.Stop();
		retVal[i].ElapsedTime=s.Elapsed;
	}
	return retVal;
}
// Copies the current project's case form data into a brand-new outbreak
// project/MDB pair on a background task, reporting progress, and deleting the
// partially-written files if the copy fails.
private void CreateMDBCommandExecute()
{
    if (IsCopying)
        return; // a copy is already in flight
    IsCopying = true;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    bool hasError = false;
    IsShowingCopyProgress = true;
    // Unique suffix prevents collisions with earlier exports of the same project.
    string guid = Guid.NewGuid().ToString();
    string fileName = _project.FileName.Replace(".prj", String.Empty);
    Task.Factory.StartNew(
        () =>
        {
            Project newProject = ContactTracing.ImportExport.ImportExportHelper.CreateNewOutbreak(Country, _appCulture,
                @"Projects\VHF\" + fileName + "_" + guid + ".prj",
                @"Projects\VHF\" + fileName + "_" + guid + ".mdb",
                _outbreakDate.Ticks.ToString(), _outbreakName);
            ContactTracing.ImportExport.FormCopier formCopier = new ImportExport.FormCopier(_project, newProject, _caseForm);
            formCopier.SetProgressBar += formCopier_SetProgressBar;
            formCopier.SetMaxProgressBarValue += formCopier_SetMaxProgressBarValue;
            try
            {
                formCopier.Copy();
            }
            catch (Exception ex)
            {
                hasError = true;
                System.Windows.Forms.MessageBox.Show(ex.Message, "Exception", System.Windows.Forms.MessageBoxButtons.OK, System.Windows.Forms.MessageBoxIcon.Error);
            }
            finally
            {
                // Always unsubscribe so the copier doesn't keep this view alive.
                formCopier.SetProgressBar -= formCopier_SetProgressBar;
                formCopier.SetMaxProgressBarValue -= formCopier_SetMaxProgressBarValue;
            }
        },
        System.Threading.CancellationToken.None, TaskCreationOptions.LongRunning, TaskScheduler.Default).ContinueWith(
        delegate
        {
            // Continuation runs back on the UI thread (captured sync context).
            ProgressValue = 0;
            IsCopying = false;
            sw.Stop();
            if (hasError)
            {
                CopyStatus = "Copying halted due to error.";
                // Remove the half-written output files.
                System.IO.File.Delete(@"Projects\VHF\" + fileName + "_" + guid + ".prj");
                System.IO.File.Delete(@"Projects\VHF\" + fileName + "_" + guid + ".mdb");
            }
            else
            {
                CopyStatus = "Finished copying data to mdb file. Elapsed time: " + sw.Elapsed.TotalMinutes.ToString("F1") + " minutes.";
            }
        }, TaskScheduler.FromCurrentSynchronizationContext());
}
/// <summary>
/// Executes multiple updates concurrently within one or more Batches, with each batch limited to 1000 records.
/// </summary>
/// <param name="serviceManager">Service manager whose ParallelProxy executes the batches.</param>
/// <param name="recordsToBeUpdated">Records to update; this collection is drained (emptied) by this method.</param>
/// <param name="totalRequestsPerBatch">Maximum number of update requests per ExecuteMultipleRequest.</param>
/// <returns>The ids of all records queued for update, in processing order.</returns>
public static List<Guid> UpdateParallelExecuteMultiple(OrganizationServiceManager serviceManager, EntityCollection recordsToBeUpdated, int totalRequestsPerBatch)
{
    int batchSize = 0;
    int recordsToBeUpdatedCount = recordsToBeUpdated.Entities.Count;
    // Number of batches = ceiling(record count / batch size).
    int batchNumber = (int)Math.Ceiling((recordsToBeUpdated.Entities.Count * 1.0d) / (totalRequestsPerBatch * 1.0d));
    Console.WriteLine();
    log.Info("Update Mode: Parallel Execute Multiple");
    List<Guid> ids = new List<Guid>();
    List<string> updatedAttributes = new List<string>(); // List of updated attributes to be logged
    IDictionary<string, ExecuteMultipleRequest> requests = new Dictionary<string, ExecuteMultipleRequest>();
    for (int i = 0; i < batchNumber; i++)
    {
        ExecuteMultipleRequest executeMultipleRequest = new ExecuteMultipleRequest()
        {
            Requests = new OrganizationRequestCollection(),
            Settings = new ExecuteMultipleSettings() { ContinueOnError = true, ReturnResponses = true },
            RequestId = Guid.NewGuid()
        };
        // Walk the collection backwards so RemoveAt(j) stays valid while records
        // are consumed into the current batch.
        for (int j = recordsToBeUpdated.Entities.Count - 1; j >= 0; j--)
        {
            ids.Add(recordsToBeUpdated.Entities[j].Id);
            UpdateStringAttributes(recordsToBeUpdated.Entities[j], updatedAttributes);
            UpdateRequest updateRequest = new UpdateRequest() { Target = recordsToBeUpdated.Entities[j], RequestId = Guid.NewGuid() };
            executeMultipleRequest.Requests.Add(updateRequest);
            recordsToBeUpdated.Entities.RemoveAt(j);
            if (batchSize == totalRequestsPerBatch - 1) // If we reach the batch limit, break from the loop
            {
                break;
            }
            batchSize++;
        }
        batchSize = 0;
        requests.Add(new KeyValuePair<string, ExecuteMultipleRequest>(i.ToString(), executeMultipleRequest));
        log.InfoFormat("Request Id for request batch number {0}: {1}", i, executeMultipleRequest.RequestId);
    }
    // Log the updated attributes
    log.Info("Attribute(s) to be updated:");
    LogUpdatedAttributes(updatedAttributes);
    log.InfoFormat("Updating {0} record(s)...", recordsToBeUpdatedCount);
    Stopwatch sw = new Stopwatch();
    sw.Start();
    // Parallel execution of all ExecuteMultipleRequest in the requests Dictionary
    IDictionary<string, ExecuteMultipleResponse> responseForUpdatedRecords = serviceManager.ParallelProxy.Execute<ExecuteMultipleRequest, ExecuteMultipleResponse>(requests);
    int threadsCount = Process.GetCurrentProcess().Threads.Count;
    sw.Stop();
    log.InfoFormat("Number of threads used: {0}", threadsCount);
    log.InfoFormat("Seconds to Update {0} record(s): {1}s", recordsToBeUpdatedCount, sw.Elapsed.TotalSeconds);
    return ids;
}
/// <summary>
/// Perform the second step of 2-phase load. Fully initialize the entity instance.
/// After processing a JDBC result set, we "resolve" all the associations
/// between the entities which were instantiated and had their state
/// "hydrated" into an array
/// </summary>
public static async Task InitializeEntityAsync(object entity, bool readOnly, ISessionImplementor session, PreLoadEvent preLoadEvent, PostLoadEvent postLoadEvent, CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    //TODO: Should this be an InitializeEntityEventListener??? (watch out for performance!)
    bool statsEnabled = session.Factory.Statistics.IsStatisticsEnabled;
    // ("stopWath" sic) Only runs when statistics collection is enabled.
    var stopWath = new Stopwatch();
    if (statsEnabled)
    {
        stopWath.Start();
    }
    IPersistenceContext persistenceContext = session.PersistenceContext;
    EntityEntry entityEntry = persistenceContext.GetEntry(entity);
    if (entityEntry == null)
    {
        throw new AssertionFailure("possible non-threadsafe access to the session");
    }
    IEntityPersister persister = entityEntry.Persister;
    object id = entityEntry.Id;
    object[] hydratedState = entityEntry.LoadedState;
    if (log.IsDebugEnabled)
        log.Debug("resolving associations for " + MessageHelper.InfoString(persister, id, session.Factory));
    // Resolve each hydrated property value (e.g. association identifiers into
    // entities), skipping unfetched lazy properties and unknown back-references.
    IType[] types = persister.PropertyTypes;
    for (int i = 0; i < hydratedState.Length; i++)
    {
        object value = hydratedState[i];
        if (!Equals(LazyPropertyInitializer.UnfetchedProperty, value) && !(Equals(BackrefPropertyAccessor.Unknown, value)))
        {
            hydratedState[i] = await (types[i].ResolveIdentifierAsync(value, session, entity, cancellationToken)).ConfigureAwait(false);
        }
    }
    //Must occur after resolving identifiers!
    if (session.IsEventSource)
    {
        preLoadEvent.Entity = entity;
        preLoadEvent.State = hydratedState;
        preLoadEvent.Id = id;
        preLoadEvent.Persister=persister;
        IPreLoadEventListener[] listeners = session.Listeners.PreLoadEventListeners;
        for (int i = 0; i < listeners.Length; i++)
        {
            await (listeners[i].OnPreLoadAsync(preLoadEvent, cancellationToken)).ConfigureAwait(false);
        }
    }
    persister.SetPropertyValues(entity, hydratedState);
    ISessionFactoryImplementor factory = session.Factory;
    // Optionally publish the fully resolved state to the second-level cache.
    if (persister.HasCache && session.CacheMode.HasFlag(CacheMode.Put))
    {
        if (log.IsDebugEnabled)
            log.Debug("adding entity to second-level cache: " + MessageHelper.InfoString(persister, id, session.Factory));
        object version = Versioning.GetVersion(hydratedState, persister);
        CacheEntry entry = new CacheEntry(hydratedState, persister, entityEntry.LoadedWithLazyPropertiesUnfetched, version, session, entity);
        CacheKey cacheKey = session.GenerateCacheKey(id, persister.IdentifierType, persister.RootEntityName);
        bool put = await (persister.Cache.PutAsync(cacheKey, persister.CacheEntryStructure.Structure(entry), session.Timestamp, version,
            persister.IsVersioned ? persister.VersionType.Comparator : null,
            UseMinimalPuts(session, entityEntry), cancellationToken)).ConfigureAwait(false);
        if (put && factory.Statistics.IsStatisticsEnabled)
        {
            factory.StatisticsImplementor.SecondLevelCachePut(persister.Cache.RegionName);
        }
    }
    // Decide whether the entry should be tracked as read-only.
    bool isReallyReadOnly = readOnly;
    if (!persister.IsMutable)
    {
        isReallyReadOnly = true;
    }
    else
    {
        object proxy = persistenceContext.GetProxy(entityEntry.EntityKey);
        if (proxy != null)
        {
            // there is already a proxy for this impl
            // only set the status to read-only if the proxy is read-only
            isReallyReadOnly = ((INHibernateProxy)proxy).HibernateLazyInitializer.ReadOnly;
        }
    }
    if (isReallyReadOnly)
    {
        //no need to take a snapshot - this is a
        //performance optimization, but not really
        //important, except for entities with huge
        //mutable property values
        persistenceContext.SetEntryStatus(entityEntry, Status.ReadOnly);
    }
    else
    {
        //take a snapshot
        TypeHelper.DeepCopy(hydratedState, persister.PropertyTypes, persister.PropertyUpdateability, hydratedState, session);
        persistenceContext.SetEntryStatus(entityEntry, Status.Loaded);
    }
    persister.AfterInitialize(entity, entityEntry.LoadedWithLazyPropertiesUnfetched, session);
    // Post-load listeners are invoked synchronously.
    if (session.IsEventSource)
    {
        postLoadEvent.Entity = entity;
        postLoadEvent.Id = id;
        postLoadEvent.Persister = persister;
        IPostLoadEventListener[] listeners = session.Listeners.PostLoadEventListeners;
        for (int i = 0; i < listeners.Length; i++)
        {
            listeners[i].OnPostLoad(postLoadEvent);
        }
    }
    if (log.IsDebugEnabled)
        log.Debug("done materializing entity " + MessageHelper.InfoString(persister, id, session.Factory));
    if (statsEnabled)
    {
        stopWath.Stop();
        factory.StatisticsImplementor.LoadEntity(persister.EntityName, stopWath.Elapsed);
    }
}
// Timer callback: atomically swaps the live sample bag for a fresh one, groups
// the drained samples by aggregation key, runs one Calculate task per group on
// an above-normal-priority scheduler, waits for them, and reports totals and
// timing to the console.
private void OnAggregationTimer2(object sender, ElapsedEventArgs e)
{
    int valuesNumber = 0;
    Console.WriteLine("Begin calc");
    Stopwatch sw = Stopwatch.StartNew();
    // BUG FIX: waitSw was previously never started (only Stop() was called in the
    // finally block), so the reported lock-wait time was always 0. Start it
    // before contending for the write lock so the wait is actually measured.
    Stopwatch waitSw = Stopwatch.StartNew();
    // The aggregation window that just closed.
    DateTime d = DateTime.Now.RoundTo(_aggregationPeriod).Add(-_aggregationPeriod);
    var newBag = new ConcurrentBag<Tuple<AggregationKey, double>>();
    _rwLock.EnterWriteLock();
    ConcurrentBag<Tuple<AggregationKey, double>> tmp;
    try
    {
        // Swap in an empty bag so producers keep writing without interruption.
        tmp = Interlocked.Exchange(ref _bag, newBag);
    }
    finally
    {
        _rwLock.ExitWriteLock();
        waitSw.Stop();
    }
    // Group the drained samples by aggregation key.
    Dictionary<AggregationKey, List<double>> dic = new Dictionary<AggregationKey, List<double>>();
    foreach (Tuple<AggregationKey, double> tuple in tmp)
    {
        List<double> lst;
        if (dic.TryGetValue(tuple.Item1, out lst))
        {
            lst.Add(tuple.Item2);
        }
        else
        {
            dic.Add(tuple.Item1, new List<double> { tuple.Item2 });
        }
    }
    // One aggregation task per key group.
    List<Task> tasks = new List<Task>();
    foreach (var kvp in dic)
    {
        var localkvp = kvp; // copy avoids the captured-loop-variable pitfall
        valuesNumber += localkvp.Value.Count;
        tasks.Add(Task<AggregatedValue>.Factory.StartNew(
            () => Calculate(localkvp.Key, localkvp.Value, d),
            CancellationToken.None,
            TaskCreationOptions.None,
            PrioritySheduler.AboveNormal)
            //.ContinueWith(t => _onAggregateCompleteAction(t.Result))
            );
    }
    foreach (var task in tasks)
    {
        task.Wait();
    }
    sw.Stop();
    Console.WriteLine("Aggregated and saved {0} groups with total of {3} values in {1} ms. Wait time: {2}",
        tmp.Count, sw.ElapsedMilliseconds, waitSw.ElapsedMilliseconds, valuesNumber);
    // For small batches, dump the keys for inspection.
    if (tmp.Count < 10)
    {
        Console.WriteLine(String.Join("\r\n", dic.Keys.Select(k => "Date: " + k.Date + ", " + String.Join(", ", k.Props.Select(p => p.Key + ": " + p.Value)))));
    }
}
/// <summary>
/// Runs the image-exposition sequence: shows each stimulus for the configured
/// exposition time with intervals/fixation points in between, optionally
/// recording audio and playing per-stimulus sounds, and writes one output line
/// per exposition. Cancellation (via <c>cts</c>) stops playback/recording and
/// finishes the exposition cleanly.
/// </summary>
private async Task startImageExposition()
{
    cts = new CancellationTokenSource();
    try
    {
        configureImagePictureBox();
        outputContent = new List<string>();
        wordLabel.Visible = false;
        // restart elapsed miliseconds
        elapsedTime = 0;
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.Start();
        // Initial interval before the first stimulus.
        await Task.Delay(currentTest.ProgramInUse.IntervalTime, cts.Token);
        // beginAudio
        if (currentTest.ProgramInUse.AudioCapture)
        {
            startRecordingAudio();
        }
        for (int counter = 0; counter < currentTest.ProgramInUse.NumExpositions && runExposition; counter++)
        {
            imgPictureBox.Visible = false;
            subtitleLabel.Visible = false;
            await intervalOrFixPoint(currentTest.ProgramInUse, cts.Token);
            drawImage();
            if (currentTest.ProgramInUse.ExpositionType == "imgaud")
            {
                playAudio();
            }
            elapsedTime = stopwatch.ElapsedMilliseconds;
            SendKeys.SendWait(currentTest.Mark.ToString()); //sending event to neuronspectrum
            imgPictureBox.Visible = true;
            showSubtitle();
            currentTest.writeLineOutputResult(currentStimulus, "false", counter + 1, outputContent, elapsedTime, StrList.outPutItemName(currentAudio));
            await Task.Delay(currentTest.ProgramInUse.ExpositionTime, cts.Token);
            Player.Stop();
        }
        imgPictureBox.Visible = false;
        subtitleLabel.Visible = false;
        await Task.Delay(currentTest.ProgramInUse.IntervalTime, cts.Token);
        // beginAudio
        if (currentTest.ProgramInUse.AudioCapture)
        {
            stopRecordingAudio();
        }
    }
    catch (TaskCanceledException)
    {
        // User cancelled: stop playback/recording and close out the exposition.
        Player.Stop();
        if (currentTest.ProgramInUse.AudioCapture)
        {
            stopRecordingAudio();
        }
        finishExposition();
    }
    catch (Exception)
    {
        // BUGFIX: previously rethrew as `new Exception(ex.Message)`, which
        // discarded the original exception type and stack trace. Rethrow
        // unchanged so callers see the real failure.
        throw;
    }
    cts = null;
}
/// <summary>
/// Seeds user accounts from Data/UserSeedData.json: creates each user (with
/// their phone number as password), assigns a random non-repeating portrait
/// photo and the RegularUser role, then indexes all users in the search engine.
/// </summary>
public static async Task SeedUsers()
{
    var watch = Stopwatch.StartNew();
    DateTime sessionStart = DateTime.Now;
    var userManager = ServiceProvider.GetRequiredService<UserManager<User>>();
    var userDataContext = ServiceProvider.GetRequiredService<UserDataContext>();
    var searchEngine = ServiceProvider.GetRequiredService<ISearchEngine<User>>();
    searchEngine.ClearIndexes();
    string data = File.ReadAllText("Data/UserSeedData.json");
    var users = JsonConvert.DeserializeObject<List<User>>(data, new IsoDateTimeConverter() { DateTimeFormat = "d/MM/yyyy" });
    const string passportTemplate = "https://randomuser.me/api/portraits/{0}/{1}.jpg";
    // Shuffled pools of portrait indexes so photos are assigned without repeats
    // until a pool runs out.
    List<int> maleList = Enumerable.Range(0, 99).OrderBy(o => Guid.NewGuid()).ToList();
    List<int> femaleList = Enumerable.Range(0, 99).OrderBy(o => Guid.NewGuid()).ToList();
    List<User> addedUsers = new List<User>();
    for (int i = 0; i < users.Count; i++)
    {
        var user = users[i];
        // BUGFIX: ordinal case-insensitive comparison instead of the
        // culture-sensitive ToLower() == ToLower() pattern.
        bool male = string.Equals(user.Gender, "Male", StringComparison.OrdinalIgnoreCase);
        List<int> list = male ? maleList : femaleList;
        if (!list.Any())
        {
            // Pool exhausted: reshuffle. BUGFIX: persist the refilled pool back
            // to maleList/femaleList — previously only the local `list` was
            // reassigned, so the pool was rebuilt on every later iteration.
            list = Enumerable.Range(0, 99).OrderBy(o => Guid.NewGuid()).ToList();
            if (male)
            {
                maleList = list;
            }
            else
            {
                femaleList = list;
            }
        }
        user.UserName = user.FileNo.ToString();
        // BUGFIX: the user must be created before a password can be added;
        // previously AddPasswordAsync ran before CreateAsync, against a user
        // that did not exist yet.
        var creationResult = await userManager.CreateAsync(user);
        if (creationResult.Succeeded)
        {
            await userManager.AddPasswordAsync(user, user.PhoneNumber.ToString());
        }
        string gender = male ? "men" : "women";
        int photoIndex = list.First();
        string url = string.Format(passportTemplate, gender, photoIndex);
        list.Remove(photoIndex);
        user.Activities.Add(new AccountActivity() { ActivityType = Models.Entities.Activity.AccountCreated });
        user.Photos.Add(new Models.Entities.Photo() { Active = true, Type = Models.Entities.PhotoType.Passport, Url = url, DateAdded = DateTime.Now, PublicID = Guid.NewGuid().ToString() });
        if (creationResult.Succeeded)
        {
            await userManager.AddToRoleAsync(user, UserRole.RegularUser.ToString());
        }
        addedUsers.Add(user);
    }
    searchEngine.Index(addedUsers);
    watch.Stop();
    Core.Log.Debug("*****\tSeeding Session Complete\t******");
    Core.Log.Debug($"Session Start: {sessionStart}");
    Core.Log.Debug($"Session End: {DateTime.Now}");
    Core.Log.Debug($"Elapsed Time: {watch.Elapsed}");
}
/// <summary>
/// Creates a performance logger for the named operation and immediately
/// begins timing it.
/// </summary>
/// <param name="name">Label identifying the operation being timed.</param>
public PerfLogger(string name)
{
    // Start the clock first; the two assignments are independent.
    stopWatch = Stopwatch.StartNew();
    this.name = name;
}
/// <summary>
/// Static initializer: starts the shared stopwatch time source the first
/// time the type is used.
/// </summary>
static PTrackerTimeProvider()
{
    // Stopwatch.StartNew() is equivalent to new Stopwatch() + Start().
    Source = Stopwatch.StartNew();
}
/// <summary>
/// Executes the queued collection update against the persister, choosing
/// between no-op (uninitialized, queued ops only), full remove (now empty),
/// remove + recreate, or incremental delete/update/insert of rows.
/// Records timing statistics when enabled and fires pre/post update and
/// eviction hooks.
/// </summary>
/// <param name="cancellationToken">Token observed before and during the update.</param>
public override async Task ExecuteAsync(CancellationToken cancellationToken)
{
    cancellationToken.ThrowIfCancellationRequested();
    object id = Key;
    ISessionImplementor session = Session;
    ICollectionPersister persister = Persister;
    IPersistentCollection collection = Collection;
    bool affectedByFilters = persister.IsAffectedByEnabledFilters(session);
    // Only pay the stopwatch cost when statistics collection is enabled.
    bool statsEnabled = session.Factory.Statistics.IsStatisticsEnabled;
    Stopwatch stopwatch = null;
    if (statsEnabled)
    {
        stopwatch = Stopwatch.StartNew();
    }
    await (PreUpdateAsync(cancellationToken)).ConfigureAwait(false);
    if (!collection.WasInitialized)
    {
        // An uninitialized collection can only be here because operations were
        // queued against it; anything else is an internal consistency error.
        if (!collection.HasQueuedOperations)
        {
            throw new AssertionFailure("no queued adds");
        }
        //do nothing - we only need to notify the cache...
    }
    else if (!affectedByFilters && collection.Empty)
    {
        // Collection emptied out: drop all persisted rows (unless there was
        // nothing persisted to begin with).
        if (!emptySnapshot)
        {
            await (persister.RemoveAsync(id, session, cancellationToken)).ConfigureAwait(false);
        }
    }
    else if (collection.NeedsRecreate(persister))
    {
        // Recreating while a filter is enabled would silently drop filtered rows.
        if (affectedByFilters)
        {
            throw new HibernateException("cannot recreate collection while filter is enabled: " + MessageHelper.CollectionInfoString(persister, collection, id, session));
        }
        if (!emptySnapshot)
        {
            await (persister.RemoveAsync(id, session, cancellationToken)).ConfigureAwait(false);
        }
        await (persister.RecreateAsync(collection, id, session, cancellationToken)).ConfigureAwait(false);
    }
    else
    {
        // Incremental path: deletes first, then updates, then inserts.
        await (persister.DeleteRowsAsync(collection, id, session, cancellationToken)).ConfigureAwait(false);
        await (persister.UpdateRowsAsync(collection, id, session, cancellationToken)).ConfigureAwait(false);
        await (persister.InsertRowsAsync(collection, id, session, cancellationToken)).ConfigureAwait(false);
    }
    Session.PersistenceContext.GetCollectionEntry(collection).AfterAction(collection);
    await (EvictAsync(cancellationToken)).ConfigureAwait(false);
    await (PostUpdateAsync(cancellationToken)).ConfigureAwait(false);
    if (statsEnabled)
    {
        stopwatch.Stop();
        Session.Factory.StatisticsImplementor.UpdateCollection(Persister.Role, stopwatch.Elapsed);
    }
}
/// <summary>
/// Writes the score summary, the iteration count, and the total elapsed time
/// (as hours/minutes/seconds) to the console.
/// </summary>
/// <param name="score">Score to print via <c>PrintScoreInfo</c>.</param>
/// <param name="stopwatch">Timer whose elapsed time is reported.</param>
/// <param name="i">Number of iterations performed.</param>
private static void PrintResults(Score score, Stopwatch stopwatch, int i)
{
    PrintScoreInfo(score);
    // Snapshot Elapsed once so all three components come from the same reading.
    var elapsed = stopwatch.Elapsed;
    Console.WriteLine($"Iterations: {i}");
    Console.WriteLine($"Total Time Elapsed: {elapsed.Hours} Hours {elapsed.Minutes} Minutes {elapsed.Seconds} Seconds");
}
/// <summary>
/// Generates a receive/send manifest (exchange list) for the given place, user
/// and department from all barcode log entries not yet synced to a bill.
/// </summary>
/// <param name="placeId">Place ID</param>
/// <param name="userId">User ID</param>
/// <param name="departmentId">Department (unit) ID</param>
/// <returns>Result entity; on success, <c>Data</c> carries the new exchange list id.</returns>
public async Task<ResultEntity> CreateReceiveSendBill(int placeId, int userId, int departmentId)
{
    _logger.LogWarning($"--------------开始形成收发件清单--------------");
    var result = new ResultEntity();
    // The stopwatch is restarted around each query/loop purely for timing logs.
    Stopwatch stopwatch = new Stopwatch();
    stopwatch.Start();
    // Unsynced logs in "received" state for this place/department.
    var receiveBarcodeLogs = await _barcodeLogRepository.GetQueryable().Where(a => !a.IsSynBill && a.CurrentPlaceId == placeId && a.DepartmentId == departmentId && a.BarcodeStatus == EnumBarcodeStatus.已签收).ToListAsync();
    stopwatch.Stop();
    _logger.LogWarning($"receiveBarcodeLogs:{stopwatch.Elapsed.TotalSeconds}");
    stopwatch.Restart();
    // Unsynced logs in "delivered" state for this place/department.
    var sendBarcodeLogs = await _barcodeLogRepository.GetQueryable().Where(a => !a.IsSynBill && a.CurrentPlaceId == placeId && a.DepartmentId == departmentId && a.BarcodeStatus == EnumBarcodeStatus.已投递).ToListAsync();
    stopwatch.Stop();
    _logger.LogWarning($"sendBarcodeLogs:{stopwatch.Elapsed.TotalSeconds}");
    if (receiveBarcodeLogs.Count > 0 || sendBarcodeLogs.Count > 0)
    {
        // One serial number is used for the whole manifest to avoid
        // mismatched/duplicated list numbers.
        var serialNo = await _serialNumberManager.GetSerialNumber(0, EnumSerialNumberType.清单);
        var receiveBarcodeNumbers = receiveBarcodeLogs.Select(a => a.BarcodeNumber).ToList();
        stopwatch.Restart();
        // Letters matching the received barcodes (with both departments eager-loaded).
        var receiveLettesr = await _letterRepository.GetQueryable().Include(a => a.ReceiveDepartment).Include(a => a.SendDepartment)
            .Where(a => receiveBarcodeNumbers.Contains(a.BarcodeNo)).ToListAsync();
        stopwatch.Stop();
        _logger.LogWarning($"receiveLettesr:{stopwatch.Elapsed.TotalSeconds}");
        var sendBarcodeNumbers = sendBarcodeLogs.Select(a => a.BarcodeNumber).ToList();
        stopwatch.Restart();
        // Letters matching the sent barcodes.
        var sendLetters = await _letterRepository.GetQueryable().Include(a => a.ReceiveDepartment)
            .Include(a => a.SendDepartment)
            .Where(a => sendBarcodeNumbers.Contains(a.BarcodeNo)).ToListAsync();
        stopwatch.Stop();
        _logger.LogWarning($"sendLetters:{stopwatch.Elapsed.TotalSeconds}");
        var department = await _departmentRepository.GetByIdAsync(departmentId);
        var user = await _userRepository.GetByIdAsync(userId);
        // Create the master list record first so the details below can
        // reference its generated Id.
        var exchangeList = new ExchangeList
        {
            CreateBy = userId,
            ObjectName = department.Name,
            ExchangeUserId = userId,
            ExchangeUserName = user.DisplayName,
            CreateTime = DateTime.Now,
            DepartmentId = departmentId,
            Printed = false,
            Type = EnumListType.收发清单
        };
        exchangeList.ListNo = exchangeList.GetListNo(serialNo);
        await _exchangeListRepository.AddAsync(exchangeList);
        // Commit now so exchangeList.Id is populated for the detail rows.
        await _unitOfWork.CommitAsync();
        // Add receive detail rows; each processed log is marked as synced.
        stopwatch.Restart();
        foreach (var letter in receiveLettesr)
        {
            var barcodeLog = receiveBarcodeLogs.FirstOrDefault(a => a.BarcodeNumber == letter.BarcodeNo);
            if (barcodeLog != null)
            {
                var exchangeListDetail = new ExchangeListDetail
                {
                    BarcodeNo = letter.LetterNo,
                    CustomData = letter.CustomData,
                    // "Returned" sub-status maps to the receive-return detail type.
                    DetailType = barcodeLog.BarcodeSubStatus == EnumBarcodeSubStatus.退回 ? EnumListDetailType.收退件 : EnumListDetailType.收件,
                    ExchangeListId = exchangeList.Id,
                    ReceiveDepartmentName = letter.ReceiveDepartment.FullName,
                    SendDepartmentName = letter.SendDepartment.FullName,
                    SecSecretLevelText = letter.GetSecretLevel(letter.BarcodeNo).ToString(),
                    UrgencyLevelText = letter.GetUrgencyLevel(letter.BarcodeNo).ToString(),
                    // Receive rows use the last operation time.
                    Time = barcodeLog.LastOperationTime
                };
                await _exchangeListDetailRepository.AddAsync(exchangeListDetail);
                barcodeLog.IsSynBill = true;
            }
        }
        // Add send detail rows.
        foreach (var letter in sendLetters)
        {
            var barcodeLog = sendBarcodeLogs.FirstOrDefault(a => a.BarcodeNumber == letter.BarcodeNo);
            if (barcodeLog != null)
            {
                var exchangeListDetail = new ExchangeListDetail
                {
                    BarcodeNo = letter.LetterNo,
                    CustomData = letter.CustomData,
                    DetailType = barcodeLog.BarcodeSubStatus == EnumBarcodeSubStatus.退回 ? EnumListDetailType.发退件 : EnumListDetailType.发件,
                    ExchangeListId = exchangeList.Id,
                    ReceiveDepartmentName = letter.ReceiveDepartment.FullName,
                    SendDepartmentName = letter.SendDepartment.FullName,
                    SecSecretLevelText = letter.GetSecretLevel(letter.BarcodeNo).ToString(),
                    UrgencyLevelText = letter.GetUrgencyLevel(letter.BarcodeNo).ToString(),
                    // NOTE(review): send rows use OperationTime while receive rows
                    // use LastOperationTime — presumably intentional; confirm.
                    Time = barcodeLog.OperationTime
                };
                await _exchangeListDetailRepository.AddAsync(exchangeListDetail);
                barcodeLog.IsSynBill = true;
            }
        }
        stopwatch.Stop();
        _logger.LogWarning($"循环插入:{stopwatch.Elapsed.TotalSeconds}");
        // Single commit persists all detail rows and the IsSynBill flag updates.
        await _unitOfWork.CommitAsync();
        result.Success = true;
        result.Data = exchangeList.Id;
    }
    else
    {
        result.Message = "暂无收发清单数据";
        _logger.LogWarning($"暂无收发清单数据");
    }
    _logger.LogWarning($"--------------结束形成收发件清单--------------");
    return (result);
}
/// <summary>
/// Runs an external command, streaming each stdout/stderr line to the callback
/// (and optionally into the returned <c>ExitData</c>), optionally killing the
/// process after a timeout, and returns an <c>ExecuteResult</c> with the exit
/// code and captured output.
/// </summary>
/// <param name="command">Executable to run.</param>
/// <param name="args">Command-line arguments.</param>
/// <param name="remoteWorkingDirectory">Working directory for the process.</param>
/// <param name="timeoutMills">Timeout in milliseconds; 0 or less disables the timeout.</param>
/// <param name="timeoutStatus">Status to report when the timeout is reached.</param>
/// <param name="callback">Receives each output line; defaults to LogTailer.ConsoleWriter.</param>
/// <param name="callbackLabel">Label passed to the callback with each line.</param>
/// <param name="dryRun">When true, logs the command but does not execute it.</param>
/// <param name="returnStdout">When true, output is accumulated into ExitData.</param>
/// <param name="runAsDomain">Optional run-as domain.</param>
/// <param name="runAsUsername">Optional run-as user name.</param>
/// <param name="runasPassword">Optional run-as password.</param>
public static ExecuteResult RunCommand(String command, String args, String remoteWorkingDirectory, long timeoutMills = 0, StatusType timeoutStatus = StatusType.Failed, Action<string, string> callback = null, String callbackLabel = null, bool dryRun = false, bool returnStdout = true, String runAsDomain = null, String runAsUsername = null, String runasPassword = null)
{
    StringBuilder stdout = new StringBuilder();
    ExecuteResult result = new ExecuteResult();
    int exitCode = 0;
    if (callback == null)
    {
        callback = LogTailer.ConsoleWriter;
    }
    Process process = new Process();
    process.StartInfo.FileName = command;
    process.StartInfo.Arguments = args;
    process.StartInfo.WorkingDirectory = remoteWorkingDirectory;
    process.StartInfo.WindowStyle = ProcessWindowStyle.Hidden;
    process.StartInfo.CreateNoWindow = true;
    process.StartInfo.RedirectStandardOutput = true;
    process.StartInfo.RedirectStandardError = true;
    process.StartInfo.UseShellExecute = false;
    if (runAsDomain != null)
    {
        process.StartInfo.Domain = runAsDomain;
    }
    if (runAsUsername != null)
    {
        process.StartInfo.UserName = runAsUsername;
    }
    if (runasPassword != null)
    {
        // ProcessStartInfo requires the password as a SecureString.
        SecureString ssPwd = new SecureString();
        for (int i = 0; i < runasPassword.Length; i++)
        {
            ssPwd.AppendChar(runasPassword[i]);
        }
        process.StartInfo.Password = ssPwd;
    }
    callback?.Invoke(callbackLabel, "Starting Command : " + command + " " + args);
    if (!dryRun)
    {
        process.Start();
        // Dedicated reader threads prevent the redirected pipes from filling
        // up and deadlocking the child process.
        Thread stdOutReader = new Thread(delegate ()
        {
            while (!process.StandardOutput.EndOfStream)
            {
                String line = process.StandardOutput.ReadLine();
                if (returnStdout)
                {
                    lock (stdout) { stdout.AppendLine(line); }
                }
                callback?.Invoke(callbackLabel, line);
            }
        });
        stdOutReader.Start();
        Thread stdErrReader = new Thread(delegate ()
        {
            while (!process.StandardError.EndOfStream)
            {
                String line = process.StandardError.ReadLine();
                if (returnStdout)
                {
                    lock (stdout) { stdout.AppendLine(line); }
                }
                callback?.Invoke(callbackLabel, line);
            }
        });
        stdErrReader.Start();
        bool timeoutReached = false;
        Stopwatch timer = new Stopwatch();
        timer.Start();
        // BUGFIX: was `IsAlive && IsAlive`, which exits the poll loop as soon
        // as EITHER stream closes; we must keep polling while either reader is
        // still alive so both streams are fully drained.
        while ((stdOutReader.IsAlive || stdErrReader.IsAlive) && !(timeoutReached))
        {
            if (timeoutMills > 0)
            {
                if (timer.ElapsedMilliseconds > timeoutMills)
                {
                    timeoutReached = true;
                }
            }
            Thread.Sleep(500);
        }
        timer.Stop();
        if (timeoutReached)
        {
            result.Status = timeoutStatus;
            String timeoutMessage = "TIMEOUT : Process [" + process.ProcessName + "] With Id [" + process.Id + "] Failed To Complete In [" + timeoutMills + "] Milliseconds And Was Termintated.";
            if (!process.HasExited)
            {
                process.Kill();
                callback?.Invoke(callbackLabel, timeoutMessage);
            }
            else
            {
                timeoutMessage = "TIMEOUT : Process [" + process.ProcessName + "] With Id [" + process.Id + "] Failed To Complete In [" + timeoutMills + "] Milliseconds But May Have Completed.";
                callback?.Invoke(callbackLabel, timeoutMessage);
            }
            callback?.Invoke(callbackLabel, "TIMEOUT : Returning Timeout Stauts [" + result.Status + "].");
        }
        // BUGFIX: ExitCode throws unless the process has exited; wait for exit
        // (instant after Kill) and join the readers so output is complete.
        process.WaitForExit();
        stdOutReader.Join();
        stdErrReader.Join();
        exitCode = process.ExitCode;
    }
    else
    {
        callback?.Invoke(callbackLabel, "Dry Run Flag Set. Execution Skipped");
    }
    result.ExitCode = exitCode;
    result.ExitData = stdout.ToString();
    result.Message = "Exit Code = " + exitCode;
    callback?.Invoke(callbackLabel, result.Message);
    return (result);
}
/// <summary>
/// Load movies asynchronously: fetches the next page of favorite movies,
/// resolves each IMDb id in parallel, and merges the results into the
/// <c>Movies</c> collection in title order. When <c>NeedSync</c> is set the
/// whole collection is reconciled (removals + additions) against the server.
/// </summary>
/// <param name="reset">When true, clears the collection and restarts at page 0.</param>
public override async Task LoadMoviesAsync(bool reset = false)
{
    await LoadingSemaphore.WaitAsync();
    StopLoadingMovies();
    if (reset)
    {
        Movies.Clear();
        Page = 0;
    }
    var watch = Stopwatch.StartNew();
    Page++;
    // Already fully loaded: undo the increment and bail out.
    if (Page > 1 && Movies.Count == MaxNumberOfMovies)
    {
        Page--;
        LoadingSemaphore.Release();
        return;
    }
    Logger.Info(
        "Loading movies favorites page...");
    HasLoadingFailed = false;
    try
    {
        IsLoadingMovies = true;
        var imdbIds = await UserService.GetFavoritesMovies(Page);
        if (!NeedSync)
        {
            var movies = new List<MovieLightJson>();
            // BUGFIX: List<T>.Add is not thread-safe; the parallel lambdas
            // below were mutating the list concurrently, which can lose items
            // or corrupt the list. Guard all adds with a lock.
            var moviesGate = new object();
            await imdbIds.movies.ParallelForEachAsync(async imdbId =>
            {
                try
                {
                    var movie = await MovieService.GetMovieLightAsync(imdbId);
                    if (movie != null)
                    {
                        movie.IsFavorite = true;
                        lock (moviesGate)
                        {
                            movies.Add(movie);
                        }
                    }
                }
                catch (Exception ex)
                {
                    Logger.Error(ex);
                }
            });
            var updatedMovies = movies.OrderBy(a => a.Title)
                .Where(a => (Genre == null || a.Genres.Contains(Genre.EnglishName)) && a.Rating >= Rating);
            // Insert each new movie at its title-sorted position.
            foreach (var movie in updatedMovies.Except(Movies.ToList(), new MovieLightComparer()))
            {
                var pair = Movies
                    .Select((value, index) => new { value, index })
                    .FirstOrDefault(x => string.CompareOrdinal(x.value.Title, movie.Title) > 0);
                if (pair == null)
                {
                    Movies.Add(movie);
                }
                else
                {
                    Movies.Insert(pair.index, movie);
                }
            }
        }
        else
        {
            // Full reconciliation: drop movies no longer favorited, then add
            // the missing ones.
            var moviesToDelete = Movies.Select(a => a.ImdbCode).Except(imdbIds.allMovies);
            var moviesToAdd = imdbIds.allMovies.Except(Movies.Select(a => a.ImdbCode));
            foreach (var movie in moviesToDelete.ToList())
            {
                Movies.Remove(Movies.FirstOrDefault(a => a.ImdbCode == movie));
            }
            var movies = moviesToAdd.ToList();
            var moviesToAddAndToOrder = new List<MovieLightJson>();
            // BUGFIX: same thread-safety issue as above.
            var orderGate = new object();
            await movies.ParallelForEachAsync(async imdbId =>
            {
                try
                {
                    var movie = await MovieService.GetMovieLightAsync(imdbId);
                    if ((Genre == null || movie.Genres.Contains(Genre.EnglishName)) && movie.Rating >= Rating)
                    {
                        lock (orderGate)
                        {
                            moviesToAddAndToOrder.Add(movie);
                        }
                    }
                }
                catch (Exception ex)
                {
                    Logger.Error(ex);
                }
            });
            foreach (var movie in moviesToAddAndToOrder.Except(Movies.ToList(), new MovieLightComparer()))
            {
                var pair = Movies
                    .Select((value, index) => new { value, index })
                    .FirstOrDefault(x => string.CompareOrdinal(x.value.Title, movie.Title) > 0);
                if (pair == null)
                {
                    Movies.Add(movie);
                }
                else
                {
                    Movies.Insert(pair.index, movie);
                }
            }
        }
        IsLoadingMovies = false;
        IsMovieFound = Movies.Any();
        CurrentNumberOfMovies = Movies.Count;
        MaxNumberOfMovies = imdbIds.nbMovies;
        await UserService.SyncMovieHistoryAsync(Movies).ConfigureAwait(false);
    }
    catch (Exception exception)
    {
        // BUGFIX: log BEFORE decrementing so the message reports the page that
        // actually failed (previously it logged Page - 1).
        Logger.Error(
            $"Error while loading movies favorite page {Page}: {exception.Message}");
        Page--;
        HasLoadingFailed = true;
        Messenger.Default.Send(new ManageExceptionMessage(exception));
    }
    finally
    {
        NeedSync = false;
        watch.Stop();
        var elapsedMs = watch.ElapsedMilliseconds;
        Logger.Info(
            $"Loaded movies favorite page {Page} in {elapsedMs} milliseconds.");
        LoadingSemaphore.Release();
    }
}
/// <summary>
/// Stress test: hammers the Windows certificate store APIs (import, ACL
/// update, read security) from 60 threads at once to verify the operations
/// are safe under concurrency. All threads rendezvous on a countdown before
/// starting so the calls overlap maximally; any exception thrown by a worker
/// fails the test, as does exceeding the 20-second budget.
/// </summary>
public void SafeForConcurrentOperations()
{
    var maxTimeAllowedForTest = TimeSpan.FromSeconds(20);
    var sw = Stopwatch.StartNew();
    try
    {
        // The CTS doubles as the test's hard deadline: waits below observe it.
        using (var cts = new CancellationTokenSource(maxTimeAllowedForTest))
        {
            var cancellationToken = cts.Token;
            var sampleCertificate = SampleCertificate.SampleCertificates[SampleCertificate.CngPrivateKeyId];
            var numThreads = 20;
            var numIterationsPerThread = 1;
            var exceptions = new BlockingCollection<Exception>();
            // Timestamped, thread-named console logging helper.
            void Log(string message) => Console.WriteLine($"{sw.Elapsed} {Thread.CurrentThread.Name}: {message}");
            // Pre-import once so the ACL/read threads have a certificate to act on.
            WindowsX509CertificateStore.ImportCertificateToStore(
                Convert.FromBase64String(sampleCertificate.Base64Bytes()),
                sampleCertificate.Password, StoreLocation.LocalMachine, "My",
                sampleCertificate.HasPrivateKey);
            // Assigned after the thread array is built (the lambdas close over them).
            CountdownEvent allThreadsReady = null;
            CountdownEvent allThreadsFinished = null;
            ManualResetEventSlim goForIt = new ManualResetEventSlim(false);
            // Factory: each thread signals ready, blocks on the start gate, runs
            // its action, collecting (not throwing) any exception, then signals done.
            Thread[] CreateThreads(int number, string name, Action action) => Enumerable.Range(0, number)
                .Select(i => new Thread(() =>
                {
                    allThreadsReady.Signal();
                    goForIt.Wait(cancellationToken);
                    for (int j = 0; j < numIterationsPerThread; j++)
                    {
                        try
                        {
                            Log($"{name} {j}");
                            action();
                        }
                        catch (Exception e)
                        {
                            Log(e.ToString());
                            exceptions.Add(e);
                        }
                    }
                    allThreadsFinished.Signal();
                })
                { Name = $"{name}#{i}" }).ToArray();
            var threads = CreateThreads(numThreads, "ImportCertificateToStore", () =>
            {
                WindowsX509CertificateStore.ImportCertificateToStore(
                    Convert.FromBase64String(sampleCertificate.Base64Bytes()),
                    sampleCertificate.Password, StoreLocation.LocalMachine, "My",
                    sampleCertificate.HasPrivateKey);
            })
            .Concat(CreateThreads(numThreads, "AddPrivateKeyAccessRules", () =>
            {
                WindowsX509CertificateStore.AddPrivateKeyAccessRules(
                    sampleCertificate.Thumbprint, StoreLocation.LocalMachine, "My",
                    new List<PrivateKeyAccessRule>
                    {
                        new PrivateKeyAccessRule("BUILTIN\\Users", PrivateKeyAccess.FullControl)
                    });
            }))
            .Concat(CreateThreads(numThreads, "GetPrivateKeySecurity", () =>
            {
                var unused = WindowsX509CertificateStore.GetPrivateKeySecurity(
                    sampleCertificate.Thumbprint, StoreLocation.LocalMachine, "My");
            })).ToArray();
            allThreadsReady = new CountdownEvent(threads.Length);
            allThreadsFinished = new CountdownEvent(threads.Length);
            foreach (var thread in threads)
            {
                thread.Start();
            }
            // Wait for every thread to be parked at the gate, then release them
            // all simultaneously for maximum contention.
            allThreadsReady.Wait(cancellationToken);
            goForIt.Set();
            allThreadsFinished.Wait(cancellationToken);
            foreach (var thread in threads)
            {
                Log($"Waiting for {thread.Name} to join...");
                if (!thread.Join(TimeSpan.FromSeconds(1)))
                {
                    // NOTE(review): Thread.Abort is obsolete/unsupported on
                    // modern .NET — acceptable here only as a last-resort
                    // test cleanup on .NET Framework.
                    Log($"Aborting {thread.Name}");
                    thread.Abort();
                }
            }
            sw.Stop();
            // Cleanup; also verifies removal works after the concurrent abuse.
            sampleCertificate.EnsureCertificateNotInStore("My", StoreLocation.LocalMachine);
            if (exceptions.Any())
            {
                throw new AssertionException(
                    $"The following exceptions were thrown during the test causing it to fail:{Environment.NewLine}{string.Join($"{Environment.NewLine}{new string('=', 80)}", exceptions.GroupBy(ex => ex.Message).Select(g => g.First().ToString()))}");
            }
            if (sw.Elapsed > maxTimeAllowedForTest)
            {
                throw new TimeoutException(
                    $"This test exceeded the {maxTimeAllowedForTest} allowed for this test to complete.");
            }
        }
    }
    catch (OperationCanceledException)
    {
        // A wait hit the CTS deadline — surface it as a timeout.
        throw new TimeoutException($"This test took longer than {maxTimeAllowedForTest} to run");
    }
}
/// <summary>
/// Start/Cancel button handler. When idle, reads the run configuration from
/// the UI (threads, duration or fixed run count, pipelining, affinity, URL,
/// headers), kicks off the load-test worker, and drives the progress bar
/// until the duration elapses or the run is cancelled. When already running,
/// acts as a Cancel button.
/// (async void is acceptable here: this is a top-level UI event handler.)
/// </summary>
private async void StartButton_Click(object sender, RoutedEventArgs e)
{
    if (!_running)
    {
        var duration = default(TimeSpan);
        int? count = null;
        var threads = Convert.ToInt32(((KeyValuePair<int, string>)Threads.SelectionBoxItem).Key);
        var threadAffinity = ThreadAffinity.IsChecked.HasValue && ThreadAffinity.IsChecked.Value;
        var pipelining = Convert.ToInt32(Pipelining.SelectionBoxItem);
        var durationText = (string)((ComboBoxItem)Duration.SelectedItem).Content;
        StatusProgressbar.IsIndeterminate = false;
        // Map the combo-box text onto either a timed run (duration) or a
        // fixed-count single-thread run (count). Count/unbounded modes have no
        // deterministic end, so the progress bar goes indeterminate.
        switch (durationText)
        {
            case "10 seconds":
                duration = TimeSpan.FromSeconds(10);
                break;
            case "20 seconds":
                duration = TimeSpan.FromSeconds(20);
                break;
            case "1 minute":
                duration = TimeSpan.FromMinutes(1);
                break;
            case "10 minutes":
                duration = TimeSpan.FromMinutes(10);
                break;
            case "1 hour":
                duration = TimeSpan.FromHours(1);
                break;
            case "Until canceled":
                duration = TimeSpan.MaxValue;
                StatusProgressbar.IsIndeterminate = true;
                break;
            case "1 run on 1 thread":
                count = 1;
                StatusProgressbar.IsIndeterminate = true;
                break;
            case "100 runs on 1 thread":
                count = 100;
                StatusProgressbar.IsIndeterminate = true;
                break;
            case "1000 runs on 1 thread":
                count = 1000;
                StatusProgressbar.IsIndeterminate = true;
                break;
            case "3000 runs on 1 thread":
                count = 3000;
                StatusProgressbar.IsIndeterminate = true;
                break;
            case "10000 runs on 1 thread":
                count = 10000;
                StatusProgressbar.IsIndeterminate = true;
                break;
        }
        // Silently ignore the click until a valid absolute URL is entered.
        if (string.IsNullOrWhiteSpace(Url.Text)) return;
        Uri uri;
        if (!Uri.TryCreate(Url.Text.Trim(), UriKind.Absolute, out uri)) return;
        // Lock down the configuration controls while the run is in flight.
        Threads.IsEnabled = false;
        Duration.IsEnabled = false;
        Url.IsEnabled = false;
        Pipelining.IsEnabled = false;
        ThreadAffinity.IsEnabled = false;
        StartButton.Content = "Cancel";
        _running = true;
        _cancellationTokenSource = new CancellationTokenSource();
        var cancellationToken = _cancellationTokenSource.Token;
        StatusProgressbar.Value = 0;
        StatusProgressbar.Visibility = Visibility.Visible;
        Dictionary<string, string> headers = ParseHeaders();
        if (count.HasValue)
            _task = Worker.Run(uri, count.Value, cancellationToken, headers);
        else
            _task = Worker.Run(uri, threads, threadAffinity, pipelining, duration, cancellationToken, headers);
        _task.GetAwaiter().OnCompleted(async () => { await JobCompleted(); });
        // Indeterminate modes have nothing to animate — the completion
        // callback above handles teardown.
        if (StatusProgressbar.IsIndeterminate) return;
        // Animate the progress bar on the UI thread until time is up.
        var sw = new Stopwatch();
        sw.Start();
        while (!cancellationToken.IsCancellationRequested && duration.TotalMilliseconds > sw.Elapsed.TotalMilliseconds)
        {
            await Task.Delay(10);
            StatusProgressbar.Value = 100.0 / duration.TotalMilliseconds * sw.Elapsed.TotalMilliseconds;
        }
        if (!_running) return;
        // Duration elapsed but the worker may still be draining: show
        // indeterminate progress until JobCompleted fires.
        StatusProgressbar.IsIndeterminate = true;
        StartButton.IsEnabled = false;
    }
    else
    {
        // Second click while running → request cancellation.
        if (_cancellationTokenSource != null && !_cancellationTokenSource.IsCancellationRequested)
            _cancellationTokenSource.Cancel();
    }
}
/// <summary>
/// MonoGame content hook: creates the sprite batch and a 1x1 white pixel
/// texture (used for primitive drawing), loads the test texture and font,
/// then loads the three background songs — timing each load and wiring
/// Finished/Looped diagnostics. Large commented-out regions are retained
/// experiments (cursor loading, texture probing, NVorbis scanning).
/// </summary>
protected override void LoadContent()
{
    _watch = new Stopwatch();
    _spriteBatch = new SpriteBatch(GraphicsDevice);
    // 1x1 white texture: tint-and-stretch it to draw solid rectangles/lines.
    _pixel = new Texture2D(GraphicsDevice, 1, 1);
    _pixel.SetData(new Color[] { Color.White });
    _test = Content.Load<Texture2D>("test");
    _font = Content.Load<SpriteFont>("arial");
    //using (var s = File.OpenRead(@"C:\Users\Michal Piatkowski\Pictures\ikon 100.png"))
    //{
    //    var tex = Texture2D.FromStream(s, GraphicsDevice);
    //    using(var u = tex.GetData<Color>())
    //        Console.WriteLine(u);
    //}
    //using (var file = File.OpenRead("risgrot.png"))
    //using (var img = Image.Load<Color>(file))
    //using (var cropped = img.Mutate(x => x.Crop(8, 0, 20, 14)))
    //{
    //    cropped.Save("crusor.png");
    //
    //    _customCursor = MouseCursor.FromPixels(cropped, new Point(2, 2)/*(img.GetSize() / 2).ToPoint()*/);
    //    Mouse.SetCursor(_customCursor);
    //}
    //Content.Load<SoundEffect>("Win Jingle").Play(0.02f, 1f, 0);
    string[] songs = new string[]
    {
        "retro level 1",
        "retro level 2",
        "retro level 3",
    };
    _songs = new Song[songs.Length];
    for (int i = 0; i < songs.Length; i++)
    {
        // Time each song load individually for console diagnostics.
        _watch.Restart();
        _songs[i] = Content.Load<Song>(songs[i]);
        _songs[i].IsLooped = false;
        _songs[i].Volume = 0.015f;
        _songs[i].Pitch = 1f;
        _songs[i].Finished += (song) => Console.WriteLine("finished");
        _songs[i].Looped += (song) => Console.WriteLine("looped");
        _watch.Stop();
        Console.WriteLine(
            "Content.Load<Song>('" + songs[i] + "') Time: " + _watch.ElapsedMilliseconds + "ms");
    }
    //readers = new List<VorbisReader>();
    //foreach (var file in Directory.EnumerateFiles(@"C:\Users\Michal Piatkowski\Music", "*.ogg",
    //    new EnumerationOptions()
    //    {
    //        RecurseSubdirectories = true,
    //        MatchCasing = MatchCasing.CaseInsensitive
    //    }))
    //{
    //    try
    //    {
    //        Console.Write(file + " - ");
    //        var reader = new VorbisReader(new NVorbis.Ogg.LightOggContainerReader(File.OpenRead(file), false));
    //        readers.Add(reader);
    //        Console.WriteLine("Duration: " + reader.TotalTime);
    //    }
    //    catch(Exception ex)
    //    {
    //        Console.WriteLine("Error: " + ex.Message);
    //    }
    //}
}
/// <summary>
/// Spins up a paired client/server for the given map, hides the local player
/// sprite, deletes the player entity, zeroes every grid's rotation, then
/// renders each non-empty grid (tiles + entities, flipped vertically) and
/// yields the resulting image. The pooled pair is returned at the end on a
/// best-effort basis.
/// </summary>
/// <param name="map">Name of the map to load into the pooled server.</param>
/// <returns>One rendered image per non-empty grid.</returns>
public async IAsyncEnumerable<Image> Paint(string map)
{
    var stopwatch = new Stopwatch();
    stopwatch.Start();
    await using var pairTracker = await PoolManager.GetServerClient(new PoolSettings() { Map = map });
    var server = pairTracker.Pair.Server;
    var client = pairTracker.Pair.Client;
    Console.WriteLine($"Loaded client and server in {(int) stopwatch.Elapsed.TotalMilliseconds} ms");
    stopwatch.Restart();
    var cEntityManager = client.ResolveDependency<IClientEntityManager>();
    var cPlayerManager = client.ResolveDependency<Robust.Client.Player.IPlayerManager>();
    // Hide the observer's own sprite so it doesn't appear in the render.
    await client.WaitPost(() =>
    {
        if (cEntityManager.TryGetComponent(cPlayerManager.LocalPlayer !.ControlledEntity !, out SpriteComponent? sprite))
        {
            sprite.Visible = false;
        }
    });
    var sEntityManager = server.ResolveDependency<IServerEntityManager>();
    var sPlayerManager = server.ResolveDependency<IPlayerManager>();
    // Pump ticks and let both sides settle before touching the map state.
    await PoolManager.RunTicksSync(pairTracker.Pair, 10);
    await Task.WhenAll(client.WaitIdleAsync(), server.WaitIdleAsync());
    var sMapManager = server.ResolveDependency<IMapManager>();
    var tilePainter = new TilePainter(client, server);
    var entityPainter = new GridPainter(client, server);
    IMapGrid[] grids = null !;
    await server.WaitPost(() =>
    {
        // Remove the player entity entirely so it can't affect the scene.
        var playerEntity = sPlayerManager.ServerSessions.Single().AttachedEntity;
        if (playerEntity.HasValue)
        {
            sEntityManager.DeleteEntity(playerEntity.Value);
        }
        grids = sMapManager.GetAllMapGrids(new MapId(1)).ToArray();
        // Normalize rotation so grid-local AABBs map directly onto the canvas.
        foreach (var grid in grids)
        {
            grid.WorldRotation = Angle.Zero;
        }
    });
    // Let the rotation/deletion changes propagate to the client renderer.
    await PoolManager.RunTicksSync(pairTracker.Pair, 10);
    await Task.WhenAll(client.WaitIdleAsync(), server.WaitIdleAsync());
    foreach (var grid in grids)
    {
        // Skip empty grids
        if (grid.LocalAABB.IsEmpty())
        {
            Console.WriteLine($"Warning: Grid {grid.Index} was empty. Skipping image rendering.");
            continue;
        }
        // Canvas size: tiles spanned by the AABB times pixels per tile.
        var tileXSize = grid.TileSize * TilePainter.TileImageSize;
        var tileYSize = grid.TileSize * TilePainter.TileImageSize;
        var bounds = grid.LocalAABB;
        var left = bounds.Left;
        var right = bounds.Right;
        var top = bounds.Top;
        var bottom = bounds.Bottom;
        var w = (int)Math.Ceiling(right - left) * tileXSize;
        var h = (int)Math.Ceiling(top - bottom) * tileYSize;
        var gridCanvas = new Image<Rgba32>(w, h);
        await server.WaitPost(() =>
        {
            tilePainter.Run(gridCanvas, grid);
            entityPainter.Run(gridCanvas, grid);
            // Game Y axis points up; image Y axis points down.
            gridCanvas.Mutate(e => e.Flip(FlipMode.Vertical));
        });
        yield return (gridCanvas);
    }
    // We don't care if it fails as we have already saved the images.
    try
    {
        await pairTracker.CleanReturnAsync();
    }
    catch
    {
        // ignored
    }
}
/// <summary>
/// Performs a single-step (level 0 → level 2) reduce for the given keys:
/// in parallel, drains the scheduled level-0 items and cleans up leftover
/// multi-step artifacts per key; then reduces the mapped results in batches
/// directly into the Lucene index; finally marks the affected keys as
/// single-step. Returns detailed per-phase performance statistics.
/// </summary>
private ReducingPerformanceStats SingleStepReduce(IndexToWorkOn index, List<string> keysToReduce, AbstractViewGenerator viewGenerator,
    ConcurrentSet<object> itemsToDelete, CancellationToken token)
{
    // Each parallel batch contributes its own set of keys that must be
    // flipped to single-step; merged after the parallel phase.
    var needToMoveToSingleStepQueue = new ConcurrentQueue<HashSet<string>>();
    if ( Log.IsDebugEnabled )
        Log.Debug(() => string.Format("Executing single step reducing for {0} keys [{1}]", keysToReduce.Count, string.Join(", ", keysToReduce)));
    var batchTimeWatcher = Stopwatch.StartNew();
    // Registers this batch with the auto-tuner's in-flight size tracking;
    // removed again in the finally block.
    var reducingBatchThrottlerId = Guid.NewGuid();
    var reducePerformanceStats = new ReducingPerformanceStats(ReduceType.SingleStep);
    var reduceLevelStats = new ReduceLevelPeformanceStats
    {
        Started = SystemTime.UtcNow,
        Level = 2
    };
    try
    {
        var parallelOperations = new ConcurrentQueue<ParallelBatchStats>();
        var parallelProcessingStart = SystemTime.UtcNow;
        // Phase 1 (parallel): drain scheduled level-0 items and clean up any
        // leftover multi-step reduce results for each key partition.
        BackgroundTaskExecuter.Instance.ExecuteAllBuffered(context, keysToReduce, enumerator =>
        {
            var parallelStats = new ParallelBatchStats
            {
                StartDelay = (long)(SystemTime.UtcNow - parallelProcessingStart).TotalMilliseconds
            };
            var localNeedToMoveToSingleStep = new HashSet<string>();
            needToMoveToSingleStepQueue.Enqueue(localNeedToMoveToSingleStep);
            var localKeys = new HashSet<string>();
            while (enumerator.MoveNext())
            {
                token.ThrowIfCancellationRequested();
                localKeys.Add(enumerator.Current);
            }
            transactionalStorage.Batch(actions =>
            {
                var getItemsToReduceParams = new GetItemsToReduceParams(index: index.IndexId, reduceKeys: localKeys, level: 0, loadData: false, itemsToDelete: itemsToDelete)
                {
                    Take = int.MaxValue // just get all, we do the rate limit when we load the number of keys to reduce, anyway
                };
                var getItemsToReduceDuration = Stopwatch.StartNew();
                int scheduledItemsSum = 0;
                int scheduledItemsCount = 0;
                List<int> scheduledItemsMappedBuckets = new List<int>();
                using (StopwatchScope.For(getItemsToReduceDuration))
                {
                    foreach (var item in actions.MapReduce.GetItemsToReduce(getItemsToReduceParams, token))
                    {
                        scheduledItemsMappedBuckets.Add(item.Bucket);
                        scheduledItemsSum += item.Size;
                        scheduledItemsCount++;
                    }
                }
                parallelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_GetItemsToReduce, getItemsToReduceDuration.ElapsedMilliseconds));
                autoTuner.CurrentlyUsedBatchSizesInBytes.GetOrAdd(reducingBatchThrottlerId, scheduledItemsSum);
                if (scheduledItemsCount == 0)
                {
                    // Here we have an interesting issue. We have scheduled reductions, because GetReduceTypesPerKeys() returned them
                    // and at the same time, we don't have any at level 0. That probably means that we have them at level 1 or 2.
                    // They shouldn't be here, and indeed, we remove them just a little down from here in this function.
                    // That said, they might have smuggled in between versions, or something happened to cause them to be here.
                    // In order to avoid that, we forcibly delete those extra items from the scheduled reductions, and move on
                    Log.Warn("Found single reduce items ({0}) that didn't have any items to reduce. Deleting level 1 & level 2 items for those keys. (If you can reproduce this, please contact [email protected])", string.Join(", ", keysToReduce));
                    var deletingScheduledReductionsDuration = Stopwatch.StartNew();
                    using (StopwatchScope.For(deletingScheduledReductionsDuration))
                    {
                        foreach (var reduceKey in keysToReduce)
                        {
                            token.ThrowIfCancellationRequested();
                            actions.MapReduce.DeleteScheduledReduction(index.IndexId, 1, reduceKey);
                            actions.MapReduce.DeleteScheduledReduction(index.IndexId, 2, reduceKey);
                        }
                    }
                    parallelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_DeleteScheduledReductions, deletingScheduledReductionsDuration.ElapsedMilliseconds));
                }
                var removeReduceResultsDuration = new Stopwatch();
                foreach (var reduceKey in localKeys)
                {
                    token.ThrowIfCancellationRequested();
                    var lastPerformedReduceType = actions.MapReduce.GetLastPerformedReduceType(index.IndexId, reduceKey);
                    // Any key not already marked single-step must be re-marked
                    // after this batch completes.
                    if (lastPerformedReduceType != ReduceType.SingleStep)
                        localNeedToMoveToSingleStep.Add(reduceKey);
                    if (lastPerformedReduceType != ReduceType.MultiStep)
                        continue;
                    if ( Log.IsDebugEnabled )
                    {
                        Log.Debug("Key {0} was moved from multi step to single step reduce, removing existing reduce results records", reduceKey);
                    }
                    using (StopwatchScope.For(removeReduceResultsDuration))
                    {
                        // now we are in single step but previously multi step reduce was performed for the given key
                        var mappedBuckets = actions.MapReduce.GetMappedBuckets(index.IndexId, reduceKey, token);
                        // add scheduled items too to be sure we will delete reduce results of already deleted documents
                        foreach (var mappedBucket in mappedBuckets.Union(scheduledItemsMappedBuckets))
                        {
                            actions.MapReduce.RemoveReduceResults(index.IndexId, 1, reduceKey, mappedBucket);
                            // Level-2 buckets aggregate 1024 level-1 buckets.
                            actions.MapReduce.RemoveReduceResults(index.IndexId, 2, reduceKey, mappedBucket / 1024);
                        }
                    }
                }
                parallelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_RemoveReduceResults, removeReduceResultsDuration.ElapsedMilliseconds));
                parallelOperations.Enqueue(parallelStats);
            });
        });
        reduceLevelStats.Operations.Add(new ParallelPerformanceStats
        {
            NumberOfThreads = parallelOperations.Count,
            DurationMs = (long)(SystemTime.UtcNow - parallelProcessingStart).TotalMilliseconds,
            BatchedOperations = parallelOperations.ToList()
        });
        var getMappedResultsDuration = new Stopwatch();
        var reductionPerformanceStats = new List<IndexingPerformanceStats>();
        // Phase 2 (sequential): reduce the mapped results in throttled batches.
        // NOTE(review): loop termination relies on GetMappedResults consuming
        // keys out of keysLeftToReduce (it receives the set and keysReturned)
        // — confirm against the storage implementation.
        var keysLeftToReduce = new HashSet<string>(keysToReduce);
        while (keysLeftToReduce.Count > 0)
        {
            var keysReturned = new HashSet<string>();
            // Try to diminish the allocations happening because of .Resize()
            var mappedResults = new List<MappedResultInfo>(keysLeftToReduce.Count);
            context.TransactionalStorage.Batch(actions =>
            {
                var take = context.CurrentNumberOfItemsToReduceInSingleBatch;
                using (StopwatchScope.For(getMappedResultsDuration))
                {
                    mappedResults = actions.MapReduce.GetMappedResults(index.IndexId, keysLeftToReduce, true, take, keysReturned, token, mappedResults);
                }
            });
            var count = mappedResults.Count;
            int size = 0;
            // Single-step reduce collapses everything into one bucket (0).
            foreach ( var item in mappedResults )
            {
                item.Bucket = 0;
                size += item.Size;
            }
            var results = mappedResults.GroupBy(x => x.Bucket, x => JsonToExpando.Convert(x.Data)).ToArray();
            context.MetricsCounters.ReducedPerSecond.Mark(results.Length);
            token.ThrowIfCancellationRequested();
            var performance = context.IndexStorage.Reduce(index.IndexId, viewGenerator, results, 2, context, null, keysReturned, count);
            reductionPerformanceStats.Add(performance);
            // Feed batch size/time back into the auto-tuner.
            autoTuner.AutoThrottleBatchSize(count, size, batchTimeWatcher.Elapsed);
        }
        // Merge per-batch sets and persist the single-step marker for each key.
        var needToMoveToSingleStep = new HashSet<string>();
        HashSet<string> set;
        while (needToMoveToSingleStepQueue.TryDequeue(out set))
        {
            needToMoveToSingleStep.UnionWith(set);
        }
        foreach (var reduceKey in needToMoveToSingleStep)
        {
            string localReduceKey = reduceKey;
            transactionalStorage.Batch(actions =>
                actions.MapReduce.UpdatePerformedReduceType(index.IndexId, localReduceKey, ReduceType.SingleStep));
        }
        reduceLevelStats.Completed = SystemTime.UtcNow;
        reduceLevelStats.Duration = reduceLevelStats.Completed - reduceLevelStats.Started;
        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_GetMappedResults, getMappedResultsDuration.ElapsedMilliseconds));
        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.StorageCommit, 0)); // in single step we write directly to Lucene index
        foreach (var stats in reductionPerformanceStats)
        {
            reduceLevelStats.Add(stats);
        }
        reducePerformanceStats.LevelStats.Add(reduceLevelStats);
    }
    finally
    {
        // Always release this batch's auto-tuner registration.
        long _;
        autoTuner.CurrentlyUsedBatchSizesInBytes.TryRemove(reducingBatchThrottlerId, out _);
    }
    return reducePerformanceStats;
}
/// <summary>
/// Performs a multi step reduce (levels 0 -> 1 -> 2) for the given index over <paramref name="keysToReduce"/>.
/// Keys whose last performed reduce type was SingleStep first get all their map results re-scheduled at
/// level 0; each level is then drained in batches until an empty batch is returned, collecting per-level
/// timing statistics. Finally every key not already marked as MultiStep is recorded as such in storage.
/// </summary>
/// <param name="index">The index (and its id) being reduced.</param>
/// <param name="keysToReduce">Reduce keys to process in this run.</param>
/// <param name="viewGenerator">View generator passed through to <c>IndexStorage.Reduce</c>.</param>
/// <param name="itemsToDelete">Shared set of scheduled-reduction markers, consumed via <c>GetItemsToReduceParams</c>.</param>
/// <param name="token">Cancellation token checked throughout; cancellation surfaces as <c>OperationCanceledException</c>.</param>
/// <returns>Per-level reducing performance statistics for this multi step run.</returns>
private ReducingPerformanceStats MultiStepReduce(IndexToWorkOn index, List<string> keysToReduce, AbstractViewGenerator viewGenerator, ConcurrentSet<object> itemsToDelete, CancellationToken token)
{
    var needToMoveToMultiStep = new HashSet<string>();
    transactionalStorage.Batch(actions =>
    {
        foreach (var localReduceKey in keysToReduce)
        {
            token.ThrowIfCancellationRequested();

            var lastPerformedReduceType = actions.MapReduce.GetLastPerformedReduceType(index.IndexId, localReduceKey);

            // remember keys that must be flagged as MultiStep once this run completes
            if (lastPerformedReduceType != ReduceType.MultiStep)
                needToMoveToMultiStep.Add(localReduceKey);

            if (lastPerformedReduceType != ReduceType.SingleStep)
                continue;

            // we exceeded the limit of items to reduce in single step
            // now we need to schedule reductions at level 0 for all map results with given reduce key
            var mappedItems = actions.MapReduce.GetMappedBuckets(index.IndexId, localReduceKey, token).ToList();
            foreach (var result in mappedItems.Select(x => new ReduceKeyAndBucket(x, localReduceKey)))
            {
                actions.MapReduce.ScheduleReductions(index.IndexId, 0, result);
            }
        }
    });

    var reducePerformance = new ReducingPerformanceStats(ReduceType.MultiStep);

    // process the three reduce levels in order (0, 1, 2)
    for (int i = 0; i < 3; i++)
    {
        var level = i;

        var reduceLevelStats = new ReduceLevelPeformanceStats()
        {
            Level = level,
            Started = SystemTime.UtcNow,
        };

        var reduceParams = new GetItemsToReduceParams(
            index.IndexId,
            new HashSet<string>(keysToReduce),
            level,
            true,
            itemsToDelete);

        var gettingItemsToReduceDuration = new Stopwatch();
        var scheduleReductionsDuration = new Stopwatch();
        var removeReduceResultsDuration = new Stopwatch();
        var storageCommitDuration = new Stopwatch();

        bool retry = true;
        // keep pulling batches for this level until a batch comes back empty
        while (retry && reduceParams.ReduceKeys.Count > 0)
        {
            // unique id registered with the auto tuner for the lifetime of this batch only
            var reduceBatchAutoThrottlerId = Guid.NewGuid();
            try
            {
                transactionalStorage.Batch(actions =>
                {
                    token.ThrowIfCancellationRequested();

                    actions.BeforeStorageCommit += storageCommitDuration.Start;
                    actions.AfterStorageCommit += storageCommitDuration.Stop;

                    var batchTimeWatcher = Stopwatch.StartNew();

                    reduceParams.Take = context.CurrentNumberOfItemsToReduceInSingleBatch;

                    int size = 0;

                    IList<MappedResultInfo> persistedResults;
                    var reduceKeys = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);
                    using (StopwatchScope.For(gettingItemsToReduceDuration))
                    {
                        persistedResults = actions.MapReduce.GetItemsToReduce(reduceParams, token);

                        foreach (var item in persistedResults)
                        {
                            reduceKeys.Add(item.ReduceKey);
                            size += item.Size;
                        }
                    }

                    // an empty batch means this level is fully drained
                    if (persistedResults.Count == 0)
                    {
                        retry = false;
                        return;
                    }

                    var count = persistedResults.Count;

                    autoTuner.CurrentlyUsedBatchSizesInBytes.GetOrAdd(reduceBatchAutoThrottlerId, size);

                    if (Log.IsDebugEnabled)
                    {
                        if (persistedResults.Count > 0)
                        {
                            Log.Debug(() => string.Format("Found {0} results for keys [{1}] for index {2} at level {3} in {4}",
                                persistedResults.Count,
                                string.Join(", ", persistedResults.Select(x => x.ReduceKey).Distinct()),
                                index.Index.PublicName,
                                level,
                                batchTimeWatcher.Elapsed));
                        }
                        else
                        {
                            Log.Debug("No reduce keys found for {0}", index.Index.PublicName);
                        }
                    }

                    token.ThrowIfCancellationRequested();

                    // every (bucket, key) pair in this batch invalidates its stored result at the next level
                    var requiredReduceNextTimeSet = new HashSet<ReduceKeyAndBucket>(persistedResults.Select(x => new ReduceKeyAndBucket(x.Bucket, x.ReduceKey)), ReduceKeyAndBucketEqualityComparer.Instance);

                    using (StopwatchScope.For(removeReduceResultsDuration))
                    {
                        foreach (var mappedResultInfo in requiredReduceNextTimeSet)
                        {
                            token.ThrowIfCancellationRequested();
                            actions.MapReduce.RemoveReduceResults(index.IndexId, level + 1, mappedResultInfo.ReduceKey, mappedResultInfo.Bucket);
                        }
                    }

                    // level 2 is the last level - nothing further is scheduled (see `level != 2` guard);
                    // buckets collapse by a factor of 1024 per level
                    if (level != 2)
                    {
                        var reduceKeysAndBucketsSet = new HashSet<ReduceKeyAndBucket>(requiredReduceNextTimeSet.Select(x => new ReduceKeyAndBucket(x.Bucket / 1024, x.ReduceKey)), ReduceKeyAndBucketEqualityComparer.Instance);

                        using (StopwatchScope.For(scheduleReductionsDuration))
                        {
                            foreach (var reduceKeysAndBucket in reduceKeysAndBucketsSet)
                            {
                                token.ThrowIfCancellationRequested();
                                actions.MapReduce.ScheduleReductions(index.IndexId, level + 1, reduceKeysAndBucket);
                            }
                        }
                    }

                    token.ThrowIfCancellationRequested();

                    var reduceTimeWatcher = Stopwatch.StartNew();

                    // rows with null Data carry no payload to reduce
                    var results = persistedResults.Where(x => x.Data != null)
                        .GroupBy(x => x.Bucket, x => JsonToExpando.Convert(x.Data))
                        .ToList();

                    var performance = context.IndexStorage.Reduce(index.IndexId, viewGenerator, results, level, context, actions, reduceKeys, persistedResults.Count);

                    context.MetricsCounters.ReducedPerSecond.Mark(results.Count());

                    reduceLevelStats.Add(performance);

                    var batchDuration = batchTimeWatcher.Elapsed;

                    if (Log.IsDebugEnabled)
                    {
                        Log.Debug("Indexed {0} reduce keys in {1} with {2} results for index {3} in {4} on level {5}", reduceKeys.Count, batchDuration, performance.ItemsCount, index.Index.PublicName, reduceTimeWatcher.Elapsed, level);
                    }

                    autoTuner.AutoThrottleBatchSize(count, size, batchDuration);
                });
            }
            finally
            {
                // always deregister this batch from the auto tuner, even on cancellation/failure
                long _;
                autoTuner.CurrentlyUsedBatchSizesInBytes.TryRemove(reduceBatchAutoThrottlerId, out _);
            }
        }

        reduceLevelStats.Completed = SystemTime.UtcNow;
        reduceLevelStats.Duration = reduceLevelStats.Completed - reduceLevelStats.Started;

        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_GetItemsToReduce, gettingItemsToReduceDuration.ElapsedMilliseconds));
        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_ScheduleReductions, scheduleReductionsDuration.ElapsedMilliseconds));
        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_RemoveReduceResults, removeReduceResultsDuration.ElapsedMilliseconds));
        reduceLevelStats.Operations.Add(PerformanceStats.From(IndexingOperation.StorageCommit, storageCommitDuration.ElapsedMilliseconds));

        reducePerformance.LevelStats.Add(reduceLevelStats);
    }

    // persist the reduce-type change for keys that were not MultiStep before this run
    foreach (var reduceKey in needToMoveToMultiStep)
    {
        token.ThrowIfCancellationRequested();
        string localReduceKey = reduceKey;
        transactionalStorage.Batch(actions =>
            actions.MapReduce.UpdatePerformedReduceType(index.IndexId, localReduceKey, ReduceType.MultiStep));
    }

    return reducePerformance;
}
/// <summary>
/// BackgroundWorker DoWork handler that builds report data and generates a report.
/// When <c>SelectedTab == 0</c> it runs the single-application flow: build results for the
/// application, the selected snapshot and (optionally) the previous snapshot, then generate
/// the report. Otherwise it runs the portfolio flow: resolve tagged applications, build
/// portfolio results, filter out ignored applications/snapshots, and generate a portfolio
/// report. All UI feedback is marshalled to the UI thread via <c>App.Current.Dispatcher</c>.
/// </summary>
/// <param name="sender">The BackgroundWorker raising the event (unused).</param>
/// <param name="e">Work event arguments (unused).</param>
void BackgroundWorkerDoWork(object sender, DoWorkEventArgs e)
{
    if (SelectedTab == 0)
    {
        // NOTE(review): integer division - 100 / 4 == 25.0; fragile if the step count changes
        double progressStep = 100 / 4;
        Stopwatch stopWatchStep = new Stopwatch();
        Stopwatch stopWatchGlobal = new Stopwatch();
        try
        {
            stopWatchGlobal.Start();
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<bool>(base.MessageManager.SetBusyMode), true);

            //Set culture for the new thread
            if (!string.IsNullOrEmpty(Setting.ReportingParameter.CultureName))
            {
                var culture = new CultureInfo(Setting.ReportingParameter.CultureName);
                Thread.CurrentThread.CurrentCulture = culture;
                Thread.CurrentThread.CurrentUICulture = culture;
            }

            //Get result for the Application
            stopWatchStep.Restart();
            ApplicationBLL.BuildApplicationResult(ActiveConnection, SelectedApplication);
            stopWatchStep.Stop();
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Build result for the application", stopWatchStep.Elapsed);

            //Get result for the previous snapshot
            stopWatchStep.Restart();
            SnapshotBLL.BuildSnapshotResult(ActiveConnection, SelectedSnapshot, true);
            stopWatchStep.Stop();
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Build result for the selected snapshot", stopWatchStep.Elapsed);

            //Get result for the previuos snapshot
            if (PreviousSnapshot != null)
            {
                stopWatchStep.Restart();
                SnapshotBLL.BuildSnapshotResult(ActiveConnection, PreviousSnapshot, false);
                stopWatchStep.Stop();
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Build result for the previous snapshot", stopWatchStep.Elapsed);
            }

            //Launch generaion
            stopWatchStep.Restart();
            GenerateReport();
            stopWatchStep.Stop();
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Report generated", stopWatchStep.Elapsed);

            //Show final message and unlock the screen
            stopWatchGlobal.Stop();
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<string, TimeSpan>(base.MessageManager.OnReportGenerated), ReportFileName, stopWatchGlobal.Elapsed);
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<bool>(base.MessageManager.SetBusyMode), false);
        }
        catch (Exception ex)
        {
            // forward any failure to the UI thread for display
            App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<Exception>(WorkerThreadException), ex);
        }
    }
    else
    {
        List<Application> Apps = new List<Application>();
        List<Snapshot> Snapshots = new List<Snapshot>();
        //if (SelectedTag != null)
        //{

        //GetActive Connection
        ActiveConnection = (Setting != null) ? Setting.GetActiveConnection() : null;

        //Get list of domains
        if (_ActiveConnection != null)
        {
            try
            {
                using (CastDomainBLL castDomainBLL = new CastDomainBLL(ActiveConnection))
                {
                    Apps = castDomainBLL.GetCommonTaggedApplications(SelectedTag);
                }
            }
            catch (Exception ex)
            {
                base.MessageManager.OnErrorOccured(ex);
            }
        }

        if (Apps != null)
        {
            Application[] SelectedApps = Apps.ToArray<Application>();
            // NOTE(review): integer division - 100 / 4 == 25.0
            double progressStep = 100 / 4;
            Stopwatch stopWatchStep = new Stopwatch();
            Stopwatch stopWatchGlobal = new Stopwatch();
            try
            {
                stopWatchGlobal.Start();
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<bool>(base.MessageManager.SetBusyMode), true);

                //Set culture for the new thread
                if (!string.IsNullOrEmpty(Setting.ReportingParameter.CultureName))
                {
                    var culture = new CultureInfo(Setting.ReportingParameter.CultureName);
                    Thread.CurrentThread.CurrentCulture = culture;
                    Thread.CurrentThread.CurrentUICulture = culture;
                }

                string[] SnapsToIgnore = null;

                //Get result for the Portfolio
                stopWatchStep.Restart();
                string[] AppsToIgnorePortfolioResult = PortfolioBLL.BuildPortfolioResult(ActiveConnection, SelectedApps);
                stopWatchStep.Stop();
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Build result for the portfolio", stopWatchStep.Elapsed);

                List<Application> N_Apps = new List<Application>();
                //Remove from Array the Ignored Apps
                for (int i = 0; i < SelectedApps.Count(); i++)
                {
                    int intAppYes = 0;
                    foreach (string s in AppsToIgnorePortfolioResult)
                    {
                        if (s == SelectedApps[i].Name)
                        {
                            intAppYes = 1;
                            break;
                        }
                        else
                        {
                            intAppYes = 0;
                        }
                    }
                    if (intAppYes == 0)
                    {
                        N_Apps.Add(SelectedApps[i]);
                    }
                }
                Application[] N_SelectedApps = N_Apps.ToArray();

                //GetActive Connection
                ActiveConnection = (Setting != null) ? Setting.GetActiveConnection() : null;

                //Get list of domains
                if (_ActiveConnection != null)
                {
                    try
                    {
                        using (CastDomainBLL castDomainBLL = new CastDomainBLL(ActiveConnection))
                        {
                            Snapshots = castDomainBLL.GetAllSnapshots(N_SelectedApps);
                        }
                    }
                    catch (Exception ex)
                    {
                        base.MessageManager.OnErrorOccured(ex);
                    }
                }

                List<Snapshot> N_Snaps = new List<Snapshot>();
                //Get result for each app's latest snapshot
                if (Snapshots != null)
                {
                    Snapshot[] SelectedApps_Snapshots = Snapshots.ToArray<Snapshot>();

                    //Get result for all snapshots in Portfolio
                    stopWatchStep.Restart();
                    SnapsToIgnore = PortfolioSnapshotsBLL.BuildSnapshotResult(ActiveConnection, SelectedApps_Snapshots, true);
                    stopWatchStep.Stop();
                    App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Build result for snapshots in portfolio", stopWatchStep.Elapsed);

                    // drop snapshots whose Href appears in the ignore list
                    for (int i = 0; i < SelectedApps_Snapshots.Count(); i++)
                    {
                        int intRemoveYes = 0;
                        foreach (string s in SnapsToIgnore)
                        {
                            if (s == SelectedApps_Snapshots[i].Href)
                            {
                                intRemoveYes = 1;
                                break;
                            }
                            else
                            {
                                intRemoveYes = 0;
                            }
                        }
                        if (intRemoveYes == 0)
                        {
                            N_Snaps.Add(SelectedApps_Snapshots[i]);
                        }
                    }
                    Snapshot[] N_SelectedApps_Snapshots = N_Snaps.ToArray();

                    //Launch generaion
                    stopWatchStep.Restart();
                    GenerateReportPortfolio(N_SelectedApps, N_SelectedApps_Snapshots, AppsToIgnorePortfolioResult, SnapsToIgnore);
                    stopWatchStep.Stop();
                }

                System.Text.StringBuilder sb = new System.Text.StringBuilder();
                // NOTE(review): the null checks below run AFTER Count() - if either array can be null
                // (e.g. SnapsToIgnore stays null when Snapshots == null) this throws before the check;
                // the null comparison should come first. Left unchanged in this documentation pass.
                if ((AppsToIgnorePortfolioResult.Count() > 0 && AppsToIgnorePortfolioResult != null) || (SnapsToIgnore.Count() > 0 && SnapsToIgnore != null))
                {
                    sb.Append("Some Applications or Snapshots were ignored during processing REST API.");
                    if (AppsToIgnorePortfolioResult.Count() > 0 && AppsToIgnorePortfolioResult != null)
                    {
                        AppsToIgnorePortfolioResult = AppsToIgnorePortfolioResult.Distinct().ToArray();
                        sb.Append("Ignored Applications are: ");
                        for (int i = 0; i < AppsToIgnorePortfolioResult.Count(); i++)
                        {
                            if (i == 0)
                            {
                                sb.Append(AppsToIgnorePortfolioResult[i].ToString());
                            }
                            else
                            {
                                sb.Append("," + AppsToIgnorePortfolioResult[i].ToString());
                            }
                        }
                    }
                    if (SnapsToIgnore.Count() > 0 && SnapsToIgnore != null)
                    {
                        SnapsToIgnore = SnapsToIgnore.Distinct().ToArray();
                        sb.Append(" Ignored Snapshots are: ");
                        for (int i = 0; i < SnapsToIgnore.Count(); i++)
                        {
                            if (i == 0)
                            {
                                sb.Append(_ActiveConnection.Url + "/" + SnapsToIgnore[i].ToString());
                            }
                            else
                            {
                                sb.Append("," + _ActiveConnection.Url + "/" + SnapsToIgnore[i].ToString());
                            }
                        }
                    }
                    App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, sb.ToString() + "", null);
                }

                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Report generated", stopWatchStep.Elapsed);

                //Show final message and unlock the screen
                stopWatchGlobal.Stop();
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<string, TimeSpan>(base.MessageManager.OnReportGenerated), ReportFileName, stopWatchGlobal.Elapsed);
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<bool>(base.MessageManager.SetBusyMode), false);
            }
            catch (System.Net.WebException webEx)
            {
                LogHelper.Instance.LogErrorFormat("Request URL '{0}' - Error execution : {1}", "", webEx.Message);
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<double, string, TimeSpan>(base.MessageManager.OnStepDone), progressStep, "Error Generating Report - " + webEx.Message + " - Typically happens when Report Generator does not find REST API (in schema)", stopWatchStep.Elapsed);
                stopWatchGlobal.Stop();
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<bool>(base.MessageManager.SetBusyMode), false);
            }
            catch (Exception ex)
            {
                App.Current.Dispatcher.Invoke(DispatcherPriority.Normal, new Action<Exception>(WorkerThreadException), ex);
            }
        }
    }
}
private async Task HandleTask(CommandMessage message, Task task) { // early out for instant tasks if (task.IsCompleted && !task.IsFaulted) { await task; await this.HandleTaskResult(message, null, task); return; } Stopwatch sw = new Stopwatch(); sw.Start(); // if we take too long, post the think message. while (!task.IsCompleted && !task.IsFaulted && sw.ElapsedMilliseconds < ThinkDelay) { await Task.Delay(10); } RestUserMessage?thinkMessage = null; if (!task.IsCompleted && !task.IsFaulted) { EmbedBuilder builder = new EmbedBuilder(); builder.Title = message.Message.Content.Truncate(256); builder.Description = WaitEmoji; thinkMessage = await message.Channel.SendMessageAsync(null, false, builder.Build()); // Discord doesn't like it when we edit embed to soon after posting them, as the edit // sometimes doesn't 'stick'. await Task.Delay(250); } // If we take way too long, post an abort message. while (!task.IsCompleted && !task.IsFaulted && sw.ElapsedMilliseconds < TaskTimeout) { await Task.Delay(10); } if (sw.ElapsedMilliseconds >= TaskTimeout && thinkMessage != null) { await thinkMessage.ModifyAsync(x => { x.Content = "I'm sorry. I seem to have lost my train of thought..."; x.Embed = null; }); Log.Write("Task timeout: " + message.Message.Content, "Bot"); return; } // Handle tasks that have gone poorly. if (task.IsFaulted) { if (thinkMessage != null) { await message.Channel.DeleteMessageAsync(thinkMessage); } if (task.Exception != null) { Exception ex = task.Exception; if (task.Exception.InnerException != null) { ex = task.Exception.InnerException; } ExceptionDispatchInfo.Capture(ex).Throw(); } else { throw new Exception("Task failed"); } } await this.HandleTaskResult(message, thinkMessage, task); }
/// <summary>
/// Top-level reduce entry point for a single index. Classifies pending reduce keys into single step
/// and multi step sets (via <c>GetReduceTypesPerKeys</c>), dispatches each set to
/// <c>SingleStepReduce</c> / <c>MultiStepReduce</c>, and - unless the operation was cancelled -
/// deletes the consumed scheduled reductions and advances the index's last-reduced etag.
/// The index is registered in <c>currentlyProcessedIndexes</c> for the duration of the call.
/// </summary>
/// <param name="indexToWorkOn">The index to reduce.</param>
/// <param name="token">Cancellation token; cancellation is absorbed (not rethrown) by this method.</param>
/// <returns>
/// Performance stats for each performed reduce plus a trailing ReduceType.None entry holding the
/// post-reduce bookkeeping timings, or null when the index has no view generator.
/// </returns>
protected ReducingPerformanceStats[] HandleReduceForIndex(IndexToWorkOn indexToWorkOn, CancellationToken token)
{
    var viewGenerator = context.IndexDefinitionStorage.GetViewGenerator(indexToWorkOn.IndexId);
    if (viewGenerator == null)
        return null;

    bool operationCanceled = false;
    var itemsToDelete = new ConcurrentSet<object>();

    var singleStepReduceKeys = new List<string>();
    var multiStepsReduceKeys = new List<string>();

    transactionalStorage.Batch(actions =>
    {
        // storage decides, per key, whether a single step or multi step reduce is required
        var mappedResultsInfo = actions.MapReduce.GetReduceTypesPerKeys(indexToWorkOn.IndexId, context.CurrentNumberOfItemsToReduceInSingleBatch, context.NumberOfItemsToExecuteReduceInSingleStep, token);

        foreach (var key in mappedResultsInfo)
        {
            token.ThrowIfCancellationRequested();

            switch (key.OperationTypeToPerform)
            {
                case ReduceType.SingleStep:
                    singleStepReduceKeys.Add(key.ReduceKey);
                    break;
                case ReduceType.MultiStep:
                    multiStepsReduceKeys.Add(key.ReduceKey);
                    break;
            }
        }
    });

    currentlyProcessedIndexes.TryAdd(indexToWorkOn.IndexId, indexToWorkOn.Index);

    var performanceStats = new List<ReducingPerformanceStats>();

    try
    {
        if (singleStepReduceKeys.Count > 0)
        {
            if (Log.IsDebugEnabled)
                Log.Debug("SingleStep reduce for keys: {0}", singleStepReduceKeys.Select(x => x + ","));

            var singleStepStats = SingleStepReduce(indexToWorkOn, singleStepReduceKeys, viewGenerator, itemsToDelete, token);

            performanceStats.Add(singleStepStats);
        }

        if (multiStepsReduceKeys.Count > 0)
        {
            if (Log.IsDebugEnabled)
                Log.Debug("MultiStep reduce for keys: {0}", multiStepsReduceKeys.Select(x => x + ","));

            var multiStepStats = MultiStepReduce(indexToWorkOn, multiStepsReduceKeys, viewGenerator, itemsToDelete, token);

            performanceStats.Add(multiStepStats);
        }
    }
    catch (OperationCanceledException)
    {
        operationCanceled = true;
    }
    catch (AggregateException e)
    {
        // only swallow aggregates that contain a cancellation; anything else propagates
        var anyOperationsCanceled = e
            .InnerExceptions
            .OfType<OperationCanceledException>()
            .Any();

        if (anyOperationsCanceled == false)
            throw;

        operationCanceled = true;
    }
    finally
    {
        // Level = -1 marks this entry as the post-reduce bookkeeping, not a real reduce level
        var postReducingOperations = new ReduceLevelPeformanceStats
        {
            Level = -1,
            Started = SystemTime.UtcNow
        };

        if (operationCanceled == false)
        {
            var deletingScheduledReductionsDuration = new Stopwatch();
            var storageCommitDuration = new Stopwatch();

            // whatever we succeeded in indexing or not, we have to update this
            // because otherwise we keep trying to re-index failed mapped results
            transactionalStorage.Batch(actions =>
            {
                actions.BeforeStorageCommit += storageCommitDuration.Start;
                actions.AfterStorageCommit += storageCommitDuration.Stop;

                ScheduledReductionInfo latest;

                using (StopwatchScope.For(deletingScheduledReductionsDuration))
                {
                    latest = actions.MapReduce.DeleteScheduledReduction(itemsToDelete);
                }

                if (latest == null)
                    return;

                actions.Indexing.UpdateLastReduced(indexToWorkOn.IndexId, latest.Etag, latest.Timestamp);
            });

            postReducingOperations.Operations.Add(PerformanceStats.From(IndexingOperation.Reduce_DeleteScheduledReductions, deletingScheduledReductionsDuration.ElapsedMilliseconds));
            postReducingOperations.Operations.Add(PerformanceStats.From(IndexingOperation.StorageCommit, storageCommitDuration.ElapsedMilliseconds));
        }

        postReducingOperations.Completed = SystemTime.UtcNow;
        postReducingOperations.Duration = postReducingOperations.Completed - postReducingOperations.Started;

        performanceStats.Add(new ReducingPerformanceStats(ReduceType.None)
        {
            LevelStats = new List<ReduceLevelPeformanceStats> { postReducingOperations }
        });

        Index _;
        currentlyProcessedIndexes.TryRemove(indexToWorkOn.IndexId, out _);
    }

    return performanceStats.ToArray();
}
/// <summary>
/// Prepares for copying: collects information about the files to be processed.
/// Validates the project settings, enumerates source directories and files, maps each file to its
/// destination path and decides (new file, or changed per the comparison algorithm) whether it goes
/// into the work list. All UI updates go through <paramref name="DGUI"/>.BeginInvoke.
/// </summary>
/// <param name="DGUI">UI dispatcher used to marshal status/model updates to the UI thread.</param>
/// <param name="projectSettings">Project settings: source dir, ignore list, destination dir.</param>
/// <param name="infoModel">Model receiving discovered files, errors and summary counters.</param>
/// <param name="status">Status object shown to the user (message + error flag).</param>
public void Prepare(Dispatcher DGUI, ProjectItemIn projectSettings, DirInfoModel infoModel, Status status)
{
    DGUI.BeginInvoke(() =>
    {
        status.IsError = false;
        status.Message = "Подготовка..";
        infoModel.OnPreparing();
    }, DispatcherPriority.Normal);

    //+ checks
    if (projectSettings == null)
    {
        DGUI.BeginInvoke(() => { status.IsError = true; status.Message = "Укажите проект"; infoModel.CanPrepare = true; });
        return;
    }
    if (String.IsNullOrWhiteSpace(projectSettings.DirFrom))
    {
        DGUI.BeginInvoke(() => { status.IsError = true; status.Message = "Укажите путь к источникам"; infoModel.CanPrepare = true; });
        return;
    }
    if (String.IsNullOrWhiteSpace(projectSettings.DirTo))
    {
        DGUI.BeginInvoke(() => { status.IsError = true; status.Message = "Укажите путь назначения"; infoModel.CanPrepare = true; });
        return;
    }

    var dirUtils = new DirectoryUtils();
    var totalFiles = 0;

    Stopwatch stopWatch = new Stopwatch();
    stopWatch.Start();

    var filePrepareIndex = 0;
    Action<String> onCheckFn = (d) =>
    {
        TimeSpan ts = stopWatch.Elapsed; //? is this really the right thread to snapshot the elapsed time on? (original author's doubt)
        DGUI.BeginInvoke(() =>
        {
            status.Message = String.Format("Подготовка (шаг 1).. {0} ({1:00}:{2:00}:{3:00}) -> {4}", ++filePrepareIndex, ts.Hours, ts.Minutes, ts.Seconds, d);
        });
    };

    //+ 1. find all directories to work with (sources)
    var dirsIgnore = dirUtils.SearchDirs(StringUtils.StringSplitNewLines(projectSettings.DirFromIgnore), null, onCheckFn); //, Conventions.DirSeparatorStr);
    var dirs = dirUtils.SearchDirs(StringUtils.StringSplitNewLines(projectSettings.DirFrom), dirsIgnore, onCheckFn); //, Conventions.DirSeparatorStr);

    //+ 2. collect all files (with their full paths) to copy
    var filePrepareTwoIndex = 0;
    Action<String> onCheckFilesFn = (f) =>
    {
        TimeSpan ts = stopWatch.Elapsed; //? is this really the right thread to snapshot the elapsed time on?
        DGUI.BeginInvoke(() =>
        {
            status.Message = String.Format("Подготовка (шаг 2).. {0} ({1:00}:{2:00}:{3:00}) -> {4}", ++filePrepareTwoIndex, ts.Hours, ts.Minutes, ts.Seconds, f);
        });
    };
    var files = dirUtils.SearchFiles(dirs, onCheckFilesFn);
    totalFiles = files.Count;

    //+ 3. among files already at the destination, look for exact matches (only missing or
    //     changed files go into the work list) - which is the whole point of the application
    var dirFromStr = projectSettings.DirFrom;
    if (dirFromStr.EndsWith(Conventions.DirSubdirSymbol))
    {
        // strip the trailing directory separator so Remove() below yields a relative path
        dirFromStr = dirFromStr.Remove(dirFromStr.Length - 1);
    }
    var fileIndex = 0;
    foreach (var file in files)
    {
        TimeSpan ts = stopWatch.Elapsed; //? is this really the right thread to snapshot the elapsed time on?
        DGUI.BeginInvoke(() =>
        {
            status.Message = String.Format("Подготовка (шаг 3).. {0}/{1} ({2:00}:{3:00}:{4:00})", ++fileIndex, totalFiles, ts.Hours, ts.Minutes, ts.Seconds);
        });

        if (String.IsNullOrWhiteSpace(file))
        {
            continue;
        }
        if (!file.StartsWith(dirFromStr))
        {
            continue; //- odd: file is outside the source root
        }

        //- map to the destination path
        var fileOut = String.Format("{0}\\{1}", projectSettings.DirTo, file.Remove(0, dirFromStr.Length));
        fileOut = fileOut.Replace("\\\\", "\\"); //? switch to a regexp (handle not only double slashes but longer runs too?)

        // NOTE: infoModel.AddFile() may not be optimal for the TreeView
        // NOTE: a single bulk refresh of the TreeView is probably faster than one per file
        //- if the file is new, copy it unconditionally
        if (!dirUtils.ExistsFile(fileOut))
        {
            DGUI.BeginInvoke(() =>
            {
                infoModel.AddFile(file, fileOut, true, DirTreeViewItemModelStatus.Normal, String.Empty);
            });
        }
        //- if it already exists, apply our own comparison algorithm - the core of the application
        else
        {
            var isFileInWork = false;
            try
            {
                isFileInWork = IsFileForReplaceAlgo(file, fileOut, projectSettings, dirUtils);
            }
            catch (Exception ex)
            {
                // some error while comparing the files (possibly the content could not be checked)
                DGUI.BeginInvoke(() =>
                {
                    infoModel.Errors.Add(ex.Message);
                    infoModel.AddFile(file, fileOut, false, DirTreeViewItemModelStatus.Error, ex.Message);
                });
            }
            if (isFileInWork)
            {
                DGUI.BeginInvoke(() =>
                {
                    infoModel.AddFile(file, fileOut, false, DirTreeViewItemModelStatus.Normal, String.Empty);
                });
            }
        }
    }
    stopWatch.Stop();

    //+ end (summary)
    DGUI.BeginInvoke(() =>
    {
        infoModel.TotalFilesFrom = totalFiles;
        infoModel.CanWork = infoModel.Items.Any(); // files.Any(); // TODO: ideally check only files/dirs that were not excluded
        infoModel.CanPrepare = true;
        infoModel.ErrorsFilesFrom = infoModel.Errors.Count();
        if (infoModel.ErrorsFilesFrom > 0)
        {
            status.IsError = true;
            status.Message = String.Format("Подготовка завершена с ошибками (ошибок: {0})", infoModel.ErrorsFilesFrom);
        }
        else
        {
            status.Message = "Подготовка завершена";
        }
    });
}
private SleepingStopwatch() { _stopwatch = Stopwatch.StartNew(); }
/// <summary>
/// Scans the configured project directory for project files (Settings.FileExtensionPm2) and builds
/// one menu item per project that is not yet done. Clicking an item opens a ProjectOverview window
/// parented to the main form.
/// </summary>
/// <returns>
/// Menu items for all unfinished projects, or null when no project directory is configured.
/// </returns>
internal ToolStripMenuItem[] ProjectScanner(/*Action<int, int, string> onProgressChanged*/)
{
    List<ToolStripMenuItem> returned = new List<ToolStripMenuItem>();

    string FilesLocation = Properties.Settings.Default.DirProject;
    if (string.IsNullOrEmpty(FilesLocation))
    {
        Log.System("Project Path not set, no scanning will be done");
        return null;
    }

    Log.System(string.Format("Scanning For projectfiles at : {0}", FilesLocation));
    List<string> Filenames = Directory.EnumerateFiles(FilesLocation, "*" + Properties.Settings.Default.FileExtensionPm2, SearchOption.AllDirectories).ToList();
    Log.System(string.Format("[Scanning Found projectfiles {0} out of {1} files, at : {2}]", Filenames.Count, Directory.EnumerateFiles(FilesLocation, "*.*", SearchOption.AllDirectories).Count(), FilesLocation));
    Log.Spacer();

    //Checking each found project.
    for (int i = 0; i < Filenames.Count; i++)
    {
        string shortfilename = "...\\" + Filenames[i].Substring(FilesLocation.Length + 1);
        string message = string.Format("[Scanning {0} out of {1}] {2}", i + 1, Filenames.Count, shortfilename);
        Log.System(message);
        // onProgressChanged(i + 1, Filenames.Count, message);

        Stopwatch sw = Stopwatch.StartNew();
        try
        {
            // SECURITY: BinaryFormatter is unsafe on untrusted input and obsolete in modern .NET
            // (SYSLIB0011). These files are produced by this application itself, but consider
            // migrating the on-disk format.
            BinaryFormatter binaryFmt = new BinaryFormatter();
            // FIX: using-block guarantees the stream is closed even when Deserialize throws;
            // previously fs.Close() was skipped on exception, leaking the file handle.
            using (FileStream fs = new FileStream(Filenames[i], FileMode.Open))
            {
                Project p = (Project)binaryFmt.Deserialize(fs);
                if (!p.projectDone)
                {
                    message = string.Format("[Scanning {0} out of {1}] {2}", i + 1, Filenames.Count, "Project not done, will be added to list");
                    try
                    {
                        ToolStripMenuItem tsmi = new ToolStripMenuItem(string.Format("{0}. {1}", i, p.ProjectName));
                        // pick the priority icon (0/1 and unknown priorities share the gray icon)
                        switch (p.CurretPriority)
                        {
                            case 0:
                                tsmi.Image = Properties.Resources.pmtpGrayIcon16;
                                break;
                            case 1:
                                tsmi.Image = Properties.Resources.pmtpGrayIcon16;
                                break;
                            case 2:
                                tsmi.Image = Properties.Resources.pmtpBlueIcon16;
                                break;
                            case 3:
                                tsmi.Image = Properties.Resources.pmtpYellowIcon16;
                                break;
                            case 4:
                                tsmi.Image = Properties.Resources.pmtpRedIcon16;
                                break;
                            default:
                                tsmi.Image = Properties.Resources.pmtpGrayIcon16;
                                break;
                        }
                        tsmi.Click += (sender, ex) =>
                        {
                            ProjectOverview po = new ProjectOverview(p);
                            // NOTE(review): this shows the "Could not find main form." box for every
                            // non-Main form encountered BEFORE Main in OpenForms - verify intent.
                            foreach (Form f in Application.OpenForms)
                            {
                                if (f is Main)
                                {
                                    po.MdiParent = f;
                                    po.Show();
                                    break;
                                }
                                else
                                {
                                    MessageBox.Show("Could not find main form.");
                                    po.Dispose();
                                }
                            }
                        };
                        returned.Add(tsmi);
                        Log.System(message);
                    }
                    catch (System.Runtime.Serialization.SerializationException e)
                    {
                        Log.Error(string.Format("Following error occured during runtime {0}, with message {1}", e.GetType(), e.Message));
                    }
                }
            }
        }
        catch
        {
            // FIX: log the short file name itself; the original passed shortfilename[i] (a single
            // character), which also threw IndexOutOfRangeException once i exceeded the name length.
            Log.Error(string.Format("Coldnot load Project {0} migth be a to old version.", shortfilename));
        }
        sw.Stop();
        Log.LapsTime(sw.Elapsed, DateTime.Now, "Scanning Project Files");
        Log.System(string.Format("[Scanning {0} out of {1}] Scanned in {2} s", i + 1, Filenames.Count, sw.Elapsed.TotalSeconds));
        Log.Spacer();
    }
    return returned.ToArray();
}
private void CopyItemWithInners(Dispatcher DGUI, DirInfoModel infoModel, DirTreeViewItemModel item, Status status, Int32 totalCount, Stopwatch stopWatch, ref Int32 currentIndex) { if (item == null) { return; } if (item.IsExcludeFromWork) { return; } if (!infoModel.IsCopyWithErrorAfterPrepare && !item.IsDir && item.Status == DirTreeViewItemModelStatus.Error) { return; // с ошибками не трогаем } //+ copy file if (!item.IsDir) { //+ count (loading steps) - только файлы считаем var index = currentIndex++; TimeSpan ts = stopWatch.Elapsed; DGUI.BeginInvoke(() => { status.Message = String.Format("Копирование.. {0}/{1} ({2:00}:{3:00}:{4:00})", index, totalCount, ts.Hours, ts.Minutes, ts.Seconds); }); try { System.IO.File.Copy(item.SourcePath, item.DistancePath, true); DGUI.BeginInvoke(() => { item.AsSuccess(); }); } catch (Exception ex) { var msg = ex.Message; //? to last inner msg? DGUI.BeginInvoke(() => { item.AsError(msg); infoModel.Errors.Add(msg); }); } } //+ create dir else { try { if (!System.IO.Directory.Exists(item.DistancePath)) { System.IO.Directory.CreateDirectory(item.DistancePath); DGUI.BeginInvoke(() => { item.AsSuccess(); }); } else { //- директория уже была, и мы ничего не делали, но мы все-равно ее подсветим успешно DGUI.BeginInvoke(() => { item.AsSuccess(); }); } } catch (Exception ex) { var msg = ex.Message; //? to last inner msg? DGUI.BeginInvoke(() => { item.AsError(msg); infoModel.Errors.Add(msg); }); } } //+ childs foreach (var itemChild in item.Items) { CopyItemWithInners(DGUI, infoModel, itemChild, status, totalCount, stopWatch, ref currentIndex); } }
/// <summary>
/// Runs a batch repair experiment over all observations read from the model/observation files.
/// For each observation that has a matching "real" faulty-component list, diagnoses are loaded
/// (or computed when findDiagnoses is enabled), then the planner repeatedly proposes repair
/// actions until all real faults are fixed or no action is available. Per-observation metrics
/// (runtime, iterations, cost, fixed components) are written to a CSV file.
/// </summary>
/// <param name="diagPath">Directory containing pre-computed diagnosis files.</param>
/// <param name="fileModel">System-model input file.</param>
/// <param name="fileObs">Observations input file.</param>
/// <param name="fileReal">File mapping observation ids to the truly faulty component ids.</param>
/// <param name="planner">Planner proposing repair actions and exporting iteration details.</param>
/// <param name="overhead">Fixed cost added per repair iteration.</param>
/// <param name="minCard">When true, read minimum-cardinality (".all") diagnosis files; otherwise "_Diag.txt" files.</param>
/// <param name="maxNumOfDiag">When positive, skip observations with more diagnoses than this.</param>
public void BatchRepair(string diagPath, string fileModel, string fileObs, string fileReal, BatchPlanner planner, double overhead, bool minCard, int maxNumOfDiag) // TODO: make this more generic (original note)
{
    bool findDiagnoses = false; // when true, diagnoses are computed instead of read from files
    List<Observation> observationsList = parser.ReadObsModelFiles(fileModel, fileObs);
    Dictionary<int, List<int>> obsReal = parser.ReadRealObsFiles(fileReal);
    if (observationsList == null || observationsList.Count == 0 || obsReal == null || obsReal.Count == 0)
    {
        return;
    }
    createNewDiagnoser();
    SystemModel model = observationsList[0].TheModel;
    Dictionary<int, Gate> compDic = model.CreateCompDic();
    Stopwatch stopwatch = new Stopwatch();
    CSVExport myExport = new CSVExport();
    foreach (Observation obs in observationsList)
    {
        // only observations with known real faults participate
        if (!obsReal.ContainsKey(obs.Id))
        {
            continue;
        }
        List<int> realComp = new List<int>(obsReal[obs.Id]);
        int counter = 0;
        double cost = 0;
        int numberOfFixed = 0;
        stopwatch.Start();
        DiagnosisSet diagnoses = null;
        if (findDiagnoses)
        {
            diagnoses = Diagnoser.FindDiagnoses(obs);
        }
        else
        {
            if (minCard)
            {
                string diagFileName = diagPath + model.Id + "_iscas85_" + obs.Id + ".all";
                diagnoses = parser.ReadGroundedDiagnosesFile(diagFileName, compDic);
            }
            else
            {
                string diagFileName = diagPath + model.Id + "_" + obs.Id + "_Diag.txt";
                diagnoses = parser.ReadDiagnosisFile(diagFileName, compDic);
            }
        }
        // a single diagnosis leaves nothing to plan over
        if (diagnoses.Count == 1)
        {
            continue;
        }
        if (maxNumOfDiag > 0 && diagnoses.Count > maxNumOfDiag)
        {
            continue;
        }
        SystemState state = new SystemState(new List<Comp>(model.Components));
        state.Diagnoses = diagnoses;
        // repair loop: plan -> apply -> re-simulate -> keep only still-consistent diagnoses
        while (realComp.Count != 0)
        {
            if (state.Diagnoses != null && state.Diagnoses.Count != 0)
            {
                RepairAction repairAction = planner.Plan(state);
                if (repairAction != null)
                {
                    counter++;
                    cost += overhead;
                    state.SetNextState(repairAction);
                    foreach (Gate gate in repairAction.R)
                    {
                        cost += gate.Cost;
                        numberOfFixed++;
                        if (realComp.Contains(gate.Id))
                        {
                            realComp.Remove(gate.Id);
                        }
                    }
                    obs.TheModel.SetValue(obs.InputValues);
                    if (realComp.Count == 0) //the system is fixed
                    {
                        planner.ExportIterationDetails(model.Id, obs.Id, counter, true);
                        break;
                    }
                    else
                    {
                        planner.ExportIterationDetails(model.Id, obs.Id, counter, false);
                    }
                    // re-inject the remaining real faults into the simulated model
                    foreach (int gid in realComp)
                    {
                        Gate g;
                        if (compDic.TryGetValue(gid, out g))
                        {
                            gateFunc.Operate(g);
                        }
                    }
                    obs.OutputValues = model.GetValue();
                    DiagnosisSet newDiagnoses = new DiagnosisSet();
                    foreach (Diagnosis diag in state.Diagnoses.Diagnoses)
                    {
                        if (Diagnoser.IsConsistent(obs, diag))
                        {
                            newDiagnoses.AddDiagnosis(diag);
                        }
                    }
                    state.Diagnoses = newDiagnoses;
                }
                else
                {
                    break;
                }
            }
            else
            {
                break;
            }
        }
        stopwatch.Stop();
        // NOTE(review): when the repair did not finish (realComp left), Reset() below is skipped,
        // so the next observation's timing resumes from this elapsed value - verify this is intended.
        if (realComp.Count > 0)
        {
            continue;
        }
        TimeSpan time = stopwatch.Elapsed;
        stopwatch.Reset();
        myExport.AddRow();
        myExport["System"] = model.Id;
        myExport["Algorithm"] = planner.Algorithm();
        if (planner.Bounded)
        {
            myExport["Bound"] = planner.Bound;
        }
        else
        {
            myExport["Bound"] = "No Bound";
        }
        myExport["Objective Function"] = planner.ObjectiveFunction();
        myExport["Overhead"] = overhead;
        myExport["Observation"] = obs.Id;
        myExport["# Diagnoses"] = diagnoses.Count;
        myExport["Runtime"] = time;
        myExport["# Iterations"] = counter;
        myExport["Cost"] = cost;
        myExport["# Fixed Components"] = numberOfFixed;
    }
    string fileName = model.Id + "_" + planner.Type() + "_o=" + overhead;
    if (maxNumOfDiag > 0)
    {
        fileName += "_MaxDiag" + maxNumOfDiag;
    }
    myExport.ExportToFile(fileName + ".csv");
    planner.CreateIterationDetailsFile(fileName + "_IterationDetails");
}
// Test/dev handler: computes dust-storm visibility from an FY3A/VIRRX L1 raster
// with a band-regression expression, writes the result into an "interested raster"
// product file, and shows the elapsed milliseconds in the window title.
// NOTE(review): the input path is hard-coded to a local disk — dev-only code.
private void 沙尘能见度_Click(object sender, EventArgs e)
{
    // First perform dust detection to obtain the detected dust region.
    string fname = @"E:\第二张盘\01_沙尘\2011年04月30日\FY3A_VIRRX_GBAL_L1_20110430_0305_1000M_MS_PRJ_DXX.LDF";
    //Dictionary<string, object> args = new Dictionary<string, object>();
    //args.Add("a", 28);
    //args.Add("b", 78);
    //args.Add("c", 245);
    //args.Add("d", 293);
    //args.Add("f", 0);
    //args.Add("g", 20);
    //args.Add("h", 15);
    //args.Add("i", 250);
    // NOTE(review): prd is never disposed — confirm whether IRasterDataProvider
    // holds native resources that require disposal.
    IRasterDataProvider prd = GeoDataDriver.Open(fname) as IRasterDataProvider;
    //ArgumentsProvider argprd = new ArgumentsProvider(prd, args);
    //string extractExpress = "((band2/10f) > var_a) && (band2/10f < var_b) && (band5/10f > var_c) && (band5/10f < var_d) && "
    //    + "(band6/10f > var_a) && ((band6 - band2)>var_f) && ((band6/10f - band5/10f + var_i)>var_h)";
    // band2: visible light, 0.525~0.575 (wavelength range)
    // band5: far infrared, 10.3~11.55
    // band6: shortwave infrared, 1.60~1.69
    //int[] exBandNos = new int[] { 2, 5, 6 };
    //IThresholdExtracter<UInt16> thrExtracter = new SimpleThresholdExtracter<UInt16>();
    //thrExtracter.Reset(argprd, exBandNos, extractExpress);
    //extractResult = new MemPixelIndexMapper("SAND", 1000);
    //thrExtracter.Extract(extractResult);
    //idxs = extractResult.Indexes.ToArray();
    // (Dust-detection result obtained above.)
    // Pass the detection result in as the AOI for the visibility computation.
    // Regression: visibility (m) = 1000 * e^(A + B*b1 + C*b2 + D*b6 + E*b4 + F*(b6 - b4 + shortFar));
    // band values are divided by 10 to convert stored counts to physical units.
    string express = "(UInt16)Math.Round(1000 * Math.Pow(Math.E,(var_visibleA + var_visibleB * band1/10f + var_visibleC * band2/10f + var_visibleD * band6/10f + var_visibleE * band4/10f + var_visibleF *(band6/10f - band4/10f + var_shortFar))),0)";
    //express = " 80 ";
    //string express = "1000*Math.Pow(Math.E,(44.7603 +0.181571 * band2/10f -0.332972 * band4/10f + 0.122736 * band6/10f -0.144287 * band5/10f -0.114465 *(band6/10f - band5/10f + 253)))";
    // Regression coefficients consumed by the expression above (var_* substitution).
    Dictionary<string, object> args = new Dictionary<string, object>();
    args.Add("visibleA", 44.7603);
    args.Add("visibleB", 0.181571);
    args.Add("visibleC", -0.332972);
    args.Add("visibleD", 0.122736);
    args.Add("visibleE", -0.144287);
    args.Add("visibleF", -0.114465);
    args.Add("shortFar", 253);
    // Bands referenced by the expression.
    int[] bandNos = new int[] { 1, 2, 6, 4 };
    ArgumentProvider argProvider = new ArgumentProvider(prd, args);
    //argProvider.AOI = idxs;
    IRasterExtracter<UInt16, UInt16> extracter = new SimpleRasterExtracter<UInt16, UInt16>();
    extracter.Reset(argProvider, bandNos, express);
    // In-memory feature map receiving the per-pixel visibility values.
    result = new MemPixelFeatureMapper<UInt16>("Visibility", 1000, new Size(prd.Width, prd.Height), prd.CoordEnvelope, prd.SpatialRef);
    Stopwatch sw = new Stopwatch();
    sw.Start();
    extracter.Extract(result);
    // Output-product identity (drives the generated file name/metadata).
    RasterIdentify id = new RasterIdentify();
    id.ThemeIdentify = "CMA";
    id.ProductIdentify = "SAND";
    id.SubProductIdentify = "VISIBILITY";
    id.Satellite = "FY3A";
    id.Sensor = "VIRRX";
    id.Resolution = "1000M";
    // Orbit time is faked as "yesterday" for this test run.
    id.OrbitDateTime = DateTime.Now.Subtract(new TimeSpan(1, 0, 0, 0, 0));
    id.GenerateDateTime = DateTime.Now;
    IInterestedRaster<UInt16> iir = new InterestedRaster<UInt16>(id, new Size(prd.Width, prd.Height), prd.CoordEnvelope.Clone());
    iir.Put(result);
    iir.Dispose();
    sw.Stop();
    // Show extraction time (ms) in the window title.
    Text = sw.ElapsedMilliseconds.ToString();
}
/// <summary>
/// Runs the word/image exposition loop: alternates word and image stimuli with the
/// configured interval/exposition/delay times, optionally records audio, sends a
/// sync mark to the background program before each stimulus, and logs each
/// exposition line to the output.
/// </summary>
private async Task startWordImageExposition()
{
    cts = new CancellationTokenSource();
    try
    {
        configureImagePictureBox();
        outputContent = new List<string>();
        // restart elapsed milliseconds
        elapsedTime = 0;
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.Start();
        await Task.Delay(currentTest.ProgramInUse.IntervalTime, cts.Token);
        // beginAudio
        if (currentTest.ProgramInUse.AudioCapture)
        {
            startRecordingAudio();
        } // starts audio recording
        // endAudio
        for (int counter = 0; counter < currentTest.ProgramInUse.NumExpositions && runExposition; counter++)
        {
            // Hide everything between expositions.
            imgPictureBox.Visible = false;
            wordLabel.Visible = false;
            subtitleLabel.Visible = false;
            await intervalOrFixPoint(currentTest.ProgramInUse, cts.Token);
            // word exposition
            // NOTE(review): wrapping at Count-1 means the LAST word in the list is
            // never shown — confirm whether this off-by-one is intentional.
            if (wordCounter == wordList.Count() - 1)
            {
                wordCounter = 0;
            }
            wordLabel.Text = wordList[wordCounter];
            currentStimulus = wordLabel.Text;
            wordCounter++;
            elapsedTime = stopwatch.ElapsedMilliseconds;
            //sending event to program that is running on background, normally neuronspectrum
            SendKeys.SendWait(currentTest.Mark.ToString());
            imgPictureBox.Visible = false;
            subtitleLabel.Visible = false;
            wordLabel.ForeColor = ColorTranslator.FromHtml(currentTest.ProgramInUse.WordColor);
            wordLabel.Visible = true;
            showSubtitle();
            currentTest.writeLineOutputResult(currentStimulus, "false", counter + 1, outputContent, elapsedTime, "false");
            await Task.Delay(currentTest.ProgramInUse.ExpositionTime, cts.Token);
            await Task.Delay(currentTest.ProgramInUse.DelayTime, cts.Token);
            //image exposition
            drawImage();
            elapsedTime = stopwatch.ElapsedMilliseconds;
            //sending event to program that is running on background, normally neuronspectrum
            SendKeys.SendWait(currentTest.Mark.ToString());
            imgPictureBox.Visible = true;
            showSubtitle();
            wordLabel.Visible = false;
            currentTest.writeLineOutputResult(currentStimulus, "false", counter + 1, outputContent, elapsedTime, "false");
            await Task.Delay(currentTest.ProgramInUse.ExpositionTime, cts.Token);
            imgPictureBox.Visible = false;
            await Task.Delay(currentTest.ProgramInUse.IntervalTime, cts.Token);
        }
        imgPictureBox.Visible = false;
        wordLabel.Visible = false;
        subtitleLabel.Visible = false;
        await Task.Delay(currentTest.ProgramInUse.IntervalTime, cts.Token);
        // endAudio
        if (currentTest.ProgramInUse.AudioCapture)
        {
            stopRecordingAudio();
        }
    }
    catch (TaskCanceledException)
    {
        // Exposition was cancelled: make sure the recorder is stopped and finish cleanly.
        if (currentTest.ProgramInUse.AudioCapture)
        {
            stopRecordingAudio();
        }
        finishExposition();
    }
    // FIX: removed `catch (Exception ex) { throw new Exception(ex.Message); }` —
    // wrapping discarded the original exception type and stack trace. Unexpected
    // exceptions now propagate unchanged; callers catching Exception still catch them.
    cts = null;
}
/// <summary>
/// Runs an approximate (max-edit-distance) word search over the loaded list in
/// parallel worker tasks and shows the matches, the task count and the elapsed
/// time in the UI.
/// </summary>
private void buttonApprox_Click(object sender, EventArgs e)
{
    // Word to search for
    string word = this.textBoxFind.Text.Trim();
    // Only proceed when a search word was entered and a word list is loaded
    if (!string.IsNullOrWhiteSpace(word) && list.Count > 0)
    {
        int maxDist;
        if (!int.TryParse(this.textBoxMaxDist.Text.Trim(), out maxDist))
        {
            MessageBox.Show("Необходимо указать максимальное расстояние");
            return;
        }
        if (maxDist < 1 || maxDist > 5)
        {
            MessageBox.Show("Максимальное расстояние должно быть в диапазоне от 1 до 5");
            return;
        }
        int threadCount;
        if (!int.TryParse(this.textBoxThreadCount.Text.Trim(), out threadCount))
        {
            MessageBox.Show("Необходимо указать количество потоков");
            return;
        }
        Stopwatch timer = new Stopwatch();
        timer.Start();
        //-------------------------------------------------
        // Start of the parallel search
        //-------------------------------------------------
        // Aggregated results from all worker tasks
        List<ParallelSearchResult> Result = new List<ParallelSearchResult>();
        // Split the list into fragments, one per worker task
        List<MinMax> arrayDivList = SubArrays.DivideSubArrays(0, list.Count, threadCount);
        // The number of tasks equals the number of fragments
        int count = arrayDivList.Count;
        Task<List<ParallelSearchResult>>[] tasks = new Task<List<ParallelSearchResult>>[count];
        for (int i = 0; i < count; i++)
        {
            // Copy each fragment so tasks never share one collection
            List<string> tempTaskList = list.GetRange(arrayDivList[i].Min, arrayDivList[i].Max - arrayDivList[i].Min);
            tasks[i] = new Task<List<ParallelSearchResult>>(
                // Method executed by the task
                ArrayThreadTask,
                // Task parameters
                new ParallelSearchThreadParam() { tempList = tempTaskList, maxDist = maxDist, ThreadNum = i, wordPattern = word });
            tasks[i].Start();
        }
        // NOTE(review): WaitAll blocks the UI thread until the search completes.
        Task.WaitAll(tasks);
        timer.Stop();
        // Merge the per-task results
        for (int i = 0; i < count; i++)
        {
            Result.AddRange(tasks[i].Result);
        }
        //-------------------------------------------------
        // End of the parallel search
        //-------------------------------------------------
        // FIX: removed a second, redundant timer.Stop() that appeared here — the
        // stopwatch was already stopped right after Task.WaitAll, so the duplicate
        // call was a no-op leftover.
        // Display the search duration
        this.textBoxApproxTime.Text = timer.Elapsed.ToString();
        // Display the computed number of worker tasks
        this.textBoxThreadCountAll.Text = count.ToString();
        // Refresh the results list in one batch
        this.listBoxResult.BeginUpdate();
        this.listBoxResult.Items.Clear();
        foreach (var x in Result)
        {
            string temp = x.word + "(расстояние=" + x.dist.ToString() + " поток=" + x.ThreadNum.ToString() + ")";
            this.listBoxResult.Items.Add(temp);
        }
        this.listBoxResult.EndUpdate();
    }
    else
    {
        MessageBox.Show("Необходимо выбрать файл и ввести слово для поиска");
    }
}