// Renders a single tile of the map definition entered in the UI and shows the
// resulting image, tracing how long map creation and tile rendering each took.
private void button1_Click(object sender, EventArgs e)
{
    try
    {
        var factory = new MgdServiceFactory();
        var renderingService = (MgdRenderingService)factory.CreateService(MgServiceType.RenderingService);
        var mapDefinitionId = new MgResourceIdentifier(txtMapDefinition.Text);

        // Time runtime-map construction.
        var stopwatch = Stopwatch.StartNew();
        var runtimeMap = new MgdMap(mapDefinitionId);
        stopwatch.Stop();
        Trace.TraceInformation("Runtime map created in {0}ms", stopwatch.ElapsedMilliseconds);

        runtimeMap.SetViewScale(Convert.ToDouble(txtScale.Text));

        // Time the actual tile render.
        stopwatch.Restart();
        MgByteReader tile = renderingService.RenderTile(
            runtimeMap,
            txtBaseGroup.Text,
            Convert.ToInt32(txtCol.Text),
            Convert.ToInt32(txtRow.Text));
        stopwatch.Stop();
        Trace.TraceInformation("RenderTile executed in {0}ms", stopwatch.ElapsedMilliseconds);

        new ImageResponseDialog(tile).ShowDialog();
    }
    catch (MgException ex)
    {
        MessageBox.Show(ex.ToString(), "Error from MapGuide");
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.ToString(), "Error");
    }
}
// Benchmarks three string-reversal implementations (StringBuilder, Array.Reverse,
// XOR swap) against one user-supplied string, printing result and elapsed time.
private static void Main()
{
    var timer = new Stopwatch();
    Console.Write("Enter some string: ");
    string input = Console.ReadLine();

    // StringBuilder-based reversal
    timer.Start();
    string bySb = ReverseSB(input);
    timer.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - {2}", bySb, timer.Elapsed, "StringBuilder class");
    timer.Reset();
    Console.WriteLine();

    // Array.Reverse-based reversal
    timer.Start();
    string byArray = ReverseArray(input);
    timer.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - {2}", byArray, timer.Elapsed, "Array.Reverse");
    timer.Reset();
    Console.WriteLine();

    // XOR-swap-based reversal
    timer.Start();
    string byXor = ReverseXor(input);
    timer.Stop();
    Console.WriteLine("Reverse text: {0}\ntime: {1} - {2}", byXor, timer.Elapsed, "XOR");
    timer.Reset();
}
// Benchmarks the three prime-generation implementations plus a trivial modulo
// loop as a baseline, printing the elapsed milliseconds of each.
public void ComputeTimesPrimes()
{
    Stopwatch w = new Stopwatch();

    w.Start();
    PrimeNumbers.GeneratePrimeNumbers1(100000);
    w.Stop(); // stop before reading so the Console write is not timed
    Console.WriteLine("Primes 1: " + w.ElapsedMilliseconds.ToString());

    w.Restart();
    PrimeNumbers.GeneratePrimeNumbers2(100000);
    w.Stop();
    Console.WriteLine("Primes 2: "+ w.ElapsedMilliseconds.ToString());

    w.Restart();
    PrimeNumbers.GeneratePrimeNumbers3(100000);
    w.Stop();
    Console.WriteLine("Primes 3: " + w.ElapsedMilliseconds.ToString());

    // BUG FIX: the original re-Started the stopwatch without resetting it, so
    // the "Primes 4" figure included all of the "Primes 3" time. Restart()
    // clears the accumulated elapsed time before the baseline loop.
    w.Restart();
    for (int i = 1; i <= 100000; i++)
    {
        int mod = i % 2;
    }
    w.Stop();
    Console.WriteLine("Primes 4: " + w.ElapsedMilliseconds.ToString());
}
//Problem 72
//Consider the fraction, n/d, where n and d are positive integers.
//If n<d and HCF(n,d)=1, it is called a reduced proper fraction.
//For d <= 8 there are 21 such fractions; the question asks how many exist
//for d <= 1,000,000. Answer: 303963552391 (run of 4/1/2008, 21:34:38.7624901).
public static void Solve(int maxD)
{
    Console.WriteLine("Solving For {0}", maxD);
    Stopwatch sw = new Stopwatch();
    sw.Start();

    decimal total = 0;
    for (int denominator = maxD; denominator > 1; denominator--)
    {
        // For an even denominator every even numerator shares the factor 2,
        // so only odd numerators can be coprime - step by 2 in that case.
        int step = (denominator % 2 == 0) ? 2 : 1;
        for (int numerator = 1; numerator < denominator; numerator += step)
        {
            if (Divisors.GreatestCommonDivisor(numerator, denominator) == 1)
            {
                total++;
            }
        }

        // Periodic progress report; the stopwatch restarts per interval so the
        // printed time covers only the most recent chunk of work.
        if (denominator % (1 + (maxD / 10000)) == 0)
        {
            Console.WriteLine("{0,8} {1} {2}", denominator, sw.Elapsed, total);
            sw.Restart();
        }
    }

    Console.WriteLine();
    Console.WriteLine(total);
}
// Use this for initialization.
// Loads the assigned SVG file with the selected rendering device, applies the
// resulting texture to this object's material, and logs per-phase timings.
void Start()
{
    if (SVGFile == null)
        return;

    var watch = new Stopwatch();

    // Phase 1: construct the implementation with the chosen device.
    watch.Start();
    ISVGDevice device = useFastButBloatedRenderer
        ? (ISVGDevice)new SVGDeviceFast()
        : new SVGDeviceSmall();
    m_implement = new Implement(this.SVGFile, device);
    watch.Stop();
    long constructionMs = watch.ElapsedMilliseconds;

    // Phase 2: process the SVG document.
    watch.Reset();
    watch.Start();
    m_implement.StartProcess();
    watch.Stop();
    long processingMs = watch.ElapsedMilliseconds;

    // Phase 3: render to a texture and assign it to the material.
    watch.Reset();
    watch.Start();
    renderer.material.mainTexture = m_implement.GetTexture();
    watch.Stop();
    long renderingMs = watch.ElapsedMilliseconds;

    UnityEngine.Debug.Log("Construction: " + Format(constructionMs) + ", Processing: " + Format(processingMs) + ", Rendering: " + Format(renderingMs));

    // Flip the texture horizontally and enable trilinear filtering.
    Vector2 scale = renderer.material.mainTextureScale;
    scale.x *= -1;
    renderer.material.mainTextureScale = scale;
    renderer.material.mainTexture.filterMode = FilterMode.Trilinear;
}
/// <summary>
/// Verify if the user credentials are valid
/// </summary>
/// <param name="username">MAL Username</param>
/// <param name="password">MAL Password</param>
/// <returns>HTTP 200 response whose body states whether the credentials are valid.</returns>
public async Task<HttpResponseMessage> Get([FromUri] string username, [FromUri] string password)
{
    var stopWatch = new Stopwatch();
    stopWatch.Start();
    Log.Information("Received credential verification request for {username}", username);

    bool result;
    try
    {
        result = await _credentialVerification.VerifyCredentials(username, password);
    }
    catch (UnauthorizedAccessException)
    {
        Log.Information("Received unauthorized - Credentials for {username} isn't valid", username);
        result = false;
    }
    catch (Exception ex)
    {
        // Unexpected failures are logged and reported as invalid rather than a 500.
        Log.Error(ex, "An error occured while trying to validate user credentails");
        result = false;
    }

    var response = Request.CreateResponse(HttpStatusCode.OK);
    response.Content = new StringContent($"Valid Credetials: {result}");

    // BUG FIX: the original called stopWatch.Start() here (a no-op on a running
    // stopwatch); the intent is to stop timing before logging the duration.
    stopWatch.Stop();
    Log.Information("Verification completed for {username}. Processing took {duration}", username, stopWatch.Elapsed);
    return response;
}
// Verifies that Dequeue blocks for (roughly) the requested timeout when the
// queue is empty, and returns the item promptly once one is enqueued.
public void WillWaitForItem()
{
    using (var queue = GetQueue())
    {
        queue.DeleteQueue();

        TimeSpan timeToWait = TimeSpan.FromSeconds(1);

        // Empty queue: Dequeue should wait out the timeout and yield null.
        var stopwatch = Stopwatch.StartNew();
        var workItem = queue.Dequeue(timeToWait);
        stopwatch.Stop();
        Trace.WriteLine(stopwatch.Elapsed);
        Assert.Null(workItem);
        // Allow 10ms of scheduler slack on the elapsed-wait check.
        Assert.True(stopwatch.Elapsed > timeToWait.Subtract(TimeSpan.FromMilliseconds(10)));

        // Enqueue shortly after; Dequeue should pick the item up within the timeout.
        Task.Factory.StartNewDelayed(100, () => queue.Enqueue(new SimpleWorkItem { Data = "Hello" }));
        stopwatch.Restart();
        workItem = queue.Dequeue(timeToWait);
        workItem.Complete();
        stopwatch.Stop();
        Trace.WriteLine(stopwatch.Elapsed);
        Assert.NotNull(workItem);
    }
}
/// <summary>
/// Checks if there were any changes in document types defined in Umbraco/uSiteBuilder/Both.
/// Does not check relations between document types (allowed childs and allowable parents)
/// </summary>
/// <param name="hasDefaultValues">Set from the comparer's state after previewing the class-based definitions.</param>
/// <returns>Comparison entries for document types known on either side.</returns>
public static List<ContentComparison> PreviewDocumentTypeChanges(out bool hasDefaultValues)
{
#if DEBUG
    Stopwatch timer = new Stopwatch();
    timer.Start();
#endif

    // compare the library based definitions to the Umbraco DB
    var definedDocTypes = PreviewDocTypes(typeof(DocumentTypeBase), "");

#if DEBUG
    timer.Stop();
    StopwatchLogger.AddToLog(string.Format(
        "Total elapsed time for method 'DocumentTypeComparer.PreviewDocumentTypeChanges' - only PreviewDocTypes: {0}ms.",
        timer.ElapsedMilliseconds));
    // FIX: the original followed Restart() with an extra timer.Start() in a
    // second #if DEBUG block; Restart() already starts the timer, so that
    // Start() was a redundant no-op and has been removed.
    timer.Restart();
#endif

    hasDefaultValues = _hasDefaultValues;

    // add any umbraco defined doc types that don't exist in the class definitions
    definedDocTypes.AddRange(ContentTypeService.GetAllContentTypes()
        .Where(doctype => definedDocTypes.All(dd => dd.Alias != doctype.Alias))
        .Select(docType => new ContentComparison
        {
            Alias = docType.Alias,
            DocumentTypeStatus = Status.Deleted,
            DocumentTypeId = docType.Id
        }));

#if DEBUG
    timer.Stop();
    StopwatchLogger.AddToLog(string.Format(
        "Total elapsed time for method 'DocumentTypeComparer.PreviewDocumentTypeChanges' - add any umbraco defined doc types that don't exist in the class definitions: {0}ms.",
        timer.ElapsedMilliseconds));
    timer.Restart();
#endif

    return definedDocTypes;
}
// Builds the permutation [1..100000] and times TimeComplexity.ElementMissing
// against it. (Earlier experiments with solution/solution2/FrogJmp on random
// sorted arrays were commented out and have been removed for clarity.)
private static void Main(string[] args)
{
    int[] perm = new int[100000];
    for (var a = 1; a < 100001; a++)
    {
        perm[a - 1] = a;
    }

    TimeComplexity tc = new TimeComplexity();

    Stopwatch timer = new Stopwatch();
    timer.Start();
    var test = tc.ElementMissing(perm);
    // BUG FIX: the original called timer.Start() a second time here (a no-op),
    // so the stopwatch was never stopped and kept running while printing.
    // The measurement must stop as soon as the call under test returns.
    timer.Stop();

    Console.WriteLine(test);
    Console.WriteLine(timer.Elapsed);
}
// Benchmark: ReflectionHelpers.CreateInstance must out-perform
// Activator.CreateInstance over one million instantiations.
public void ShouldPerformFasterThanActivator()
{
    // Warm up both paths so JIT costs don't skew the measurement.
    for (var i = 0; i < 10; ++i)
    {
        Activator.CreateInstance<ClassWithDefaultConstuctor>();
        ReflectionHelpers.CreateInstance<ClassWithDefaultConstuctor>();
    }
    // warmup

    var total = 0;

    var stopwatch = Stopwatch.StartNew();
    for (var i = 0; i < 1000000; ++i)
    {
        total += ReflectionHelpers.CreateInstance<ClassWithDefaultConstuctor>().Value;
    }
    stopwatch.Stop();
    var creatorTime = stopwatch.Elapsed;

    stopwatch.Restart();
    for (var i = 0; i < 1000000; ++i)
    {
        total += Activator.CreateInstance<ClassWithDefaultConstuctor>().Value;
    }
    stopwatch.Stop();
    var activatorTime = stopwatch.Elapsed;

    Assert.IsTrue(creatorTime < activatorTime);
    // Accumulating Value also prevents the loops from being optimized away.
    Assert.AreEqual(2000000, total);
}
//Double operations
// Measures how long a single Math operation ("square root", "natural logarithm"
// or "sinus") takes on 'num', timed with the caller-supplied stopwatch (which is
// reset first and left stopped on return).
// Throws ArgumentException for any other operation name.
public static TimeSpan MesureDoubleOperationsPerformance(double num, string operation, Stopwatch stopwatch)
{
    stopwatch.Reset();

    switch (operation)
    {
        case "square root":
            stopwatch.Start();
            Math.Sqrt(num);
            stopwatch.Stop();
            break;
        case "natural logarithm":
            stopwatch.Start();
            Math.Log(num);
            stopwatch.Stop();
            break;
        case "sinus":
            stopwatch.Start();
            Math.Sin(num);
            stopwatch.Stop();
            break;
        default:
            throw new ArgumentException("Invalid operations");
    }

    // Single exit point instead of a return inside every case.
    return stopwatch.Elapsed;
}
// Runs an iterative-deepening minimax (alpha-beta) search limited by the given
// time limit in milliseconds; returns the best move from the deepest fully
// completed depth.
public Move IterativeDeepening(State state, double timeLimit)
{
    var timer = new Stopwatch();
    Move bestMove = null;
    int depth = 1;

    // start the search
    timer.Start();
    while (true)
    {
        if (timer.ElapsedMilliseconds > timeLimit)
        {
            if (bestMove != null)
            {
                return bestMove;
            }

            // Workaround: with very low limits the clock can expire before any
            // depth completes; grant a little extra time and restart the clock.
            timeLimit += 10;
            timer.Restart();
        }

        Tuple<Move, Boolean> result = IterativeDeepeningAlphaBeta(state, depth, Double.MinValue, Double.MaxValue, timeLimit, timer);
        // Only adopt the move when the full recursion depth was searched.
        if (result.Item2)
        {
            bestMove = result.Item1;
        }
        depth++;
    }
}
// Times three Fibonacci implementations computing Fi(30) and prints each
// result together with its elapsed milliseconds.
public static void TestProgram()
{
    var stopwatch = Stopwatch.StartNew();
    Console.WriteLine("Fi(30) = {0} - slow", CalculateNthFi(30));
    stopwatch.Stop();
    Console.WriteLine("Calculated in {0}", stopwatch.ElapsedMilliseconds);

    stopwatch.Restart();
    Console.WriteLine("Fi(30) = {0} - fast", CalculateNthFi2(30));
    stopwatch.Stop();
    Console.WriteLine("Calculated in {0}", stopwatch.ElapsedMilliseconds);

    stopwatch.Restart();
    Console.WriteLine("Fi(30) = {0} - fast2", CalculateNthFi3(30, 0, 1, 1));
    stopwatch.Stop();
    Console.WriteLine("Calculated in {0}", stopwatch.ElapsedMilliseconds);

    Console.WriteLine("");
}
// Compiles the regular expression from the UI, matches it against the input
// text, reports timing/result for both phases, and renders the automata.
private void btnInterpretate_Click(object sender, EventArgs e)
{
    try
    {
        var timer = new Stopwatch();

        // Phase 1: parse/compile the regular expression.
        timer.Restart();
        RegularExpression regex = new RegularExpression(txtRegEx.Text);
        timer.Stop();
        ReportResult("Parsing '" + txtRegEx.Text + "'", "SUCCESS", regex.IsCompiled, timer);

        // Phase 2: run the match.
        timer.Restart();
        bool result = regex.IsMatch(txtInput.Text);
        timer.Stop();
        ReportResult("Matching '" + txtInput.Text + "'", result.ToString(), regex.IsCompiled, timer);

        ReportData("Original Expression:\t" + regex.OriginalExpression +
            "\r\nInfix Expression:\t" + regex.FormattedExpression +
            "\r\nPostfix string:\t" + regex.PostfixExpression +
            "\r\n\r\nNon Deterministic Automata has\t\t" + regex.NDStateCount +
            " states.\r\nDeterministic Automata has\t\t" + regex.DStateCount +
            " states.\r\nOptimized Deterministic Automata has\t" + regex.OptimizedDStateCount + " states.");

        automataViewer1.Initialize(regex);
    }
    catch (RegularExpressionParser.RegularExpressionParserException exc)
    {
        ReportError("PARSER ERROR", exc.ToString());
    }
    catch (Exception exc)
    {
        ReportError("EXCEPTION", exc.ToString());
    }
}
// Verifies that copying a ScalarSet via its copy constructor is not
// substantially slower (< 1.1x) than cloning the underlying raw array.
public void CopyConstructorSpeed()
{
    var random = new Random();
    var values = new double[5000000];
    for (var i = 0; i < 5000000; i++)
    {
        values[i] = random.Next();
    }

    var scalarSet = new ScalarSet(values);

    // Baseline: clone the raw array.
    var stopwatch = Stopwatch.StartNew();
    values.Clone();
    stopwatch.Stop();
    var copyArrayTime = stopwatch.ElapsedMilliseconds;
    Trace.WriteLine("Copying array with 1M values took: " + copyArrayTime + " ms");

    // Measured path: ScalarSet copy constructor.
    stopwatch.Restart();
    new ScalarSet(scalarSet);
    stopwatch.Stop();
    Trace.WriteLine("Copying scalar set with 1M values took: " + stopwatch.ElapsedMilliseconds + " ms");

    // BUG FIX: the original divided two longs, truncating 'fraction' to an
    // integer (almost always 0 or 1), which made the < 1.1 assertion
    // meaningless. Floating-point division preserves the real ratio; the
    // Math.Max guard avoids dividing by zero on a sub-millisecond baseline.
    var fraction = (double)stopwatch.ElapsedMilliseconds / Math.Max(copyArrayTime, 1);
    Assert.IsTrue(fraction < 1.1);
}
// Wires up the container, reads CHITIN.KEY, parses it with both the original
// and the reworked key-resource builder, and prints the elapsed time of each.
static void Main(string[] args)
{
    Bootstrapper bootstrapper = Bootstrapper.Create()
        .RegisterInstaller(new ResourceBuilderInstaller())
        .RegisterInstaller(new StructInstaller())
        .RegisterInstaller(new ServiceInstaller());

    string chittinKeyPath = Path.Combine(@"C:\Program Files (x86)\Baldur's Gate Enhanced Edition\Data\00766", "CHITIN.KEY");
    string dialogPath = Path.Combine(@"C:\Program Files (x86)\Baldur's Gate Enhanced Edition\Data\data\lang\en_US", "dialog.tlk");

    var resourceFileProvider = bootstrapper.WindsorContainer.Resolve<IResourceFileProvider>();
    byte[] contentOfFile = resourceFileProvider.GetByteContentOfFile(chittinKeyPath);
    IKeyResourceBuilder keyResourceBuilder = bootstrapper.WindsorContainer.Resolve<IKeyResourceBuilder>();
    //IDlgResourceBuilder dlgResourceBuilder = bootstrapper.WindsorContainer.Resolve<IDlgResourceBuilder>();

    // Time the original builder.
    var stopwatch = Stopwatch.StartNew();
    KeyResource keyResource = keyResourceBuilder.BuildKeyResource(contentOfFile);
    stopwatch.Stop();
    Console.WriteLine("Miliseconds : {0} - Ticks {1}", stopwatch.ElapsedMilliseconds, stopwatch.ElapsedTicks);

    // Time the reworked builder on the same input.
    stopwatch.Restart();
    KeyResource buildKeyResourceNew = keyResourceBuilder.BuildKeyResourceNew(contentOfFile);
    stopwatch.Stop();
    Console.WriteLine("Miliseconds : {0} - Ticks {1}", stopwatch.ElapsedMilliseconds, stopwatch.ElapsedTicks);

    Console.ReadLine();
}
// Compares instantiation cost over one million iterations: direct 'new',
// Activator.CreateInstance with a by-name type lookup each iteration, and a
// cached factory delegate from the dictionary.
private void Form1_Load(object sender, EventArgs e)
{
    asdassd.Add("MenuGetir", () => new MyClass());
    var stopwatch = new Stopwatch();

    // 1) Direct construction.
    stopwatch.Start();
    for (int i = 0; i < 1000000; i++)
    {
        MyClass c = new MyClass();
    }
    stopwatch.Stop();
    MessageBox.Show(stopwatch.ElapsedMilliseconds.ToString());
    stopwatch.Reset();

    // 2) Reflection: the type-name lookup is deliberately inside the loop,
    // so its cost is part of what is being measured.
    Type t = typeof(MyClass);
    stopwatch.Start();
    for (int i = 0; i < 1000000; i++)
    {
        var c = System.Activator.CreateInstance(Type.GetType(t.FullName));
    }
    stopwatch.Stop();
    MessageBox.Show(stopwatch.ElapsedMilliseconds.ToString());
    stopwatch.Reset();

    // 3) Cached factory delegate.
    stopwatch.Start();
    for (int i = 0; i < 1000000; i++)
    {
        var c = asdassd["MenuGetir"]();
    }
    stopwatch.Stop();
    MessageBox.Show(stopwatch.ElapsedMilliseconds.ToString());
}
// Runs the error-correction encoders of QrCode.Net and ZXing 1000 times each
// on the same payload and reports both timings through Assert.Pass.
public void ECPerformanceTest()
{
    var stopwatch = new Stopwatch();
    int timesofTest = 1000;
    var elapsed = new string[2];
    string testCase = "sdg;alwsetuo1204985lkscvzlkjt;";

    // QrCode.Net encoder
    stopwatch.Start();
    for (int i = 0; i < timesofTest; i++)
    {
        this.Encode(testCase, 3);
    }
    stopwatch.Stop();
    elapsed[0] = stopwatch.ElapsedMilliseconds.ToString();

    // ZXing encoder
    stopwatch.Restart();
    for (int i = 0; i < timesofTest; i++)
    {
        this.ZXEncode(testCase, 3);
    }
    stopwatch.Stop();
    elapsed[1] = stopwatch.ElapsedMilliseconds.ToString();

    Assert.Pass("EC performance {0} Tests~ QrCode.Net: {1} ZXing: {2}", timesofTest, elapsed[0], elapsed[1]);
}
// Stress test: loads the binary test models onto a Truck channel, broadcasts
// them, and asserts both phases finish within their time budgets.
public void BinaryStressTest()
{
    var truck = new Truck("MAN");
    var upper = Math.Pow(10, 3);

    // Phase 1: load the cargo.
    var stopwatch = Stopwatch.StartNew();
    truck.LoadCargo(BinaryTestModels.ToList());
    stopwatch.Stop();
    var secondElapsedToAdd = stopwatch.ElapsedMilliseconds;
    Trace.WriteLine(string.Format("Put on the Channel {1} items. Time Elapsed: {0}", secondElapsedToAdd, upper));

    // Phase 2: broadcast.
    stopwatch.Restart();
    truck.DeliverTo("Dad");
    stopwatch.Stop();
    var secondElapsedToBroadcast = stopwatch.ElapsedMilliseconds;
    Trace.WriteLine(string.Format("Broadcast on the Channel {1} items. Time Elapsed: {0}", secondElapsedToBroadcast, upper));

    var elem = truck.UnStuffCargo<List<BinaryTestModel>>().First();
    Assert.AreEqual(elem.Count(), 1000, "Not every elements have been broadcasted");
    Assert.IsTrue(secondElapsedToAdd < 5000, "Add took more than 5 second. Review the logic, performance must be 10000 elems in less than 5 sec");
    Assert.IsTrue(secondElapsedToBroadcast < 3000, "Broadcast took more than 3 second. Review the logic, performance must be 10000 elems in less than 5 sec");
}
internal static void Main() { var stopwatch = new Stopwatch(); var context = new TelerikAcademyEntities(); stopwatch.Start(); var employees = context.Employees; foreach (var employee in employees) { Console.WriteLine( "{0}, {1}, {2}", (employee.FirstName + ' ' + employee.LastName).PadLeft(30), employee.Department.Name.PadLeft(30), employee.Address.Town.Name.PadLeft(15)); } stopwatch.Stop(); Console.WriteLine("Time elapsed: {0} seconds", (decimal)stopwatch.Elapsed.Milliseconds / 1000); stopwatch.Reset(); stopwatch.Start(); var includeEmployees = context.Employees.Include("Address").Include("Department"); foreach (var employee in includeEmployees) { Console.WriteLine( "{0}, {1}, {2}", (employee.FirstName + ' ' + employee.LastName).PadLeft(30), employee.Department.Name.PadLeft(30), employee.Address.Town.Name.PadLeft(15)); } stopwatch.Stop(); Console.WriteLine("Time elapsed: {0} seconds", (decimal)stopwatch.Elapsed.Milliseconds / 1000); }
// Benchmark driver: warms up for ~1.2s (stabilizes CPU cache/pipeline), then
// runs testMethod 'rounds' times, printing one CSV line per round in the
// format "round;runtime ms;memory KB".
public static void run(Action testMethod, int rounds)
{
    var stopwatch = Stopwatch.StartNew();
    // A Warmup of 1000-1500 mS stabilizes the CPU cache and pipeline.
    while (stopwatch.ElapsedMilliseconds < 1200)
    {
        testMethod(); // Warmup iteration; result discarded
        clearMemory ();
    }
    stopwatch.Stop();

    Console.WriteLine ("Round;Runtime ms;Memory KB");
    for (int round = 0; round < rounds; ++round)
    {
        stopwatch.Restart();
        testMethod();
        stopwatch.Stop();
        long usedMemoryKb = getUsedMemoryKB ();
        clearMemory ();
        Console.WriteLine((1 + round) + ";" + stopwatch.ElapsedMilliseconds + ";" + usedMemoryKb);
    }
}
// Segments a large text 20 times in accurate mode and 20 times in full mode,
// printing the average per-iteration time of each mode.
public void TestCutLargeFile()
{
    var weiCheng = File.ReadAllText(@"Resources\围城.txt");
    var seg = new JiebaSegmenter();
    seg.Cut("热身"); // warm-up cut so one-time initialization is not timed
    Console.WriteLine("Start to cut");

    var n = 20;
    var stopWatch = new Stopwatch();

    // Accurate mode
    stopWatch.Start();
    for (var i = 0; i < n; i++)
    {
        seg.Cut(weiCheng);
    }
    stopWatch.Stop();
    Console.WriteLine("Accurate mode: {0} ms", stopWatch.ElapsedMilliseconds / n);

    // Full mode
    stopWatch.Restart();
    for (var i = 0; i < n; i++)
    {
        seg.Cut(weiCheng, true);
    }
    stopWatch.Stop();
    Console.WriteLine("Full mode: {0} ms", stopWatch.ElapsedMilliseconds / n);
}
// Prints every course name from the MSSQL and the MySQL database and reports
// how long each provider took.
static void Main(string[] args)
{
    var stopwatch = Stopwatch.StartNew();
    using (var db = new DB.MSSQL.courseMSSQLEntities())
    {
        foreach (var course in db.Courses)
        {
            Console.WriteLine(course.NAME);
        }
    }
    stopwatch.Stop();
    Console.WriteLine("Using {0} miniseconds.", stopwatch.ElapsedMilliseconds);

    stopwatch.Restart();
    using (var db = new DB.MySQL.courseMySQLEntities())
    {
        foreach (var course in db.courses)
        {
            Console.WriteLine(course.NAME);
        }
    }
    stopwatch.Stop();
    Console.WriteLine("Using {0} miniseconds.", stopwatch.ElapsedMilliseconds);
}
// Renders the given topic three times inside a single RequestContext, timing
// each pass so cache warm-up effects are visible. 'iterations' is unused here.
private static void RunWithContext(string topic, int iterations, Federation federation)
{
    using (RequestContext.Create())
    {
        // One message template per pass (wording kept exactly as before).
        string[] formats =
        {
            "Rendered first times in {0} seconds",
            "Rendered second time in {0} seconds",
            "Rendered third time in {0} seconds"
        };

        var stopwatch = new Stopwatch();
        foreach (string format in formats)
        {
            stopwatch.Restart();
            string content = federation.GetTopicFormattedContent(new QualifiedTopicRevision(topic), null);
            stopwatch.Stop();
            Console.WriteLine(format, stopwatch.ElapsedMilliseconds / 1000.0F);
        }
    }
}
// Loads the digit samples, predicts each with the current hypothesis, logs the
// outcome, then trains on the sample - timing prediction and training per item.
static void Main(string[] args)
{
    InputLoader loader = new InputLoader();
    loader.LoadFile("digits.csv");

    Stopwatch sw = new Stopwatch();
    var heursiticDetection = new HeuristicDetection(10, 5, quantity: 50, numberOfPoints: 500);
    var hypothesis = new CurrentHypothesis();

    foreach (var input in loader.AllElements())
    {
        ///For every new input we extract n points of interest
        ///And create a feature vector which characterizes the spatial relationship between these features
        ///For every heuristic we get a dictionary of points of interest
        DetectedPoints v = heursiticDetection.getFeatureVector(input.Item1);

        ///Compare this feature vector agaist each of the other feature vectors we know about
        sw.Restart();
        TestResult r = hypothesis.Predict(v);
        // BUG FIX: sw.Elapsed.Milliseconds is only the 0-999 millisecond
        // component; ElapsedMilliseconds is the full elapsed duration.
        Debug.Print("Prediction: " + sw.ElapsedMilliseconds.ToString());

        var best = r.BestResult();
        if (best != null && best.Item2 != 0)
        {
            LogProgress(best.Item1, input.Item2);
        }

        sw.Restart();
        hypothesis.Train(v, input.Item2, r);
        Debug.Print("Training: " + sw.ElapsedMilliseconds.ToString());
        //heursiticDetection.pointsOfInterest.Add(HeuristicDetection.Generate(10, 5, 10));
    }
}
// Saves and indexes every app retrieved for the given partition, logging the
// time spent saving and flushing the index. Returns the number of apps handled
// (0 when the parser produced nothing).
private int FindAndSaveApps(ICollection<int> partition)
{
    ICollection<App> apps = appParser.RetrieveApps(partition);
    if (apps == null)
    {
        return 0;
    }

    var watch = Stopwatch.StartNew();
    foreach (App app in apps)
    {
        repository.App.Save(app);
        indexer.AddApp(app);
    }
    watch.Stop();
    logger.Debug("Saved {0} apps using {1}ms", apps.Count, watch.ElapsedMilliseconds);

    watch.Restart();
    indexer.Flush();
    watch.Stop();
    logger.Debug("Indexed {0} apps using {1}ms", apps.Count, watch.ElapsedMilliseconds);

    return apps.Count;
}
// Parses the same JSON document 20 times with Newtonsoft.Json and 20 times
// with the local JParser, printing each iteration's parse time for comparison.
static void Main(string[] args)
{
    string data = File.ReadAllText(@".....");
    var watch = new Stopwatch();

    // Newtonsoft.Json baseline.
    for (int i = 0; i < 20; i++)
    {
        watch.Restart();
        var jObj = Newtonsoft.Json.Linq.JObject.Parse(data);
        watch.Stop();
        Console.WriteLine(watch.Elapsed);
    }
    Console.WriteLine(" ");

    GC.Collect(); // level the playing field before the second run

    // Local JParser implementation.
    for (int i = 0; i < 20; i++)
    {
        watch.Restart();
        JParser parser = new JParser();
        JToken token = parser.Parse(data);
        watch.Stop();
        Console.WriteLine(watch.Elapsed);
    }
    Console.WriteLine(" ");
}
// Measures name-equality lookups without an index: stores 5000 players, then
// runs four queries against different names, printing each elapsed time.
// The four copy-pasted query/assert/time blocks of the original are factored
// into a single private helper; behavior and output are unchanged.
public void Test_perf_of_query_without_index()
{
    OdbFactory.Delete("index1perf.ndb");

    using (var odb = OdbFactory.Open("index1perf.ndb"))
    {
        for (var i = 0; i < 5000; i++)
        {
            var player = new Player("Player" + i, DateTime.Now, new Sport("Sport" + i));
            odb.Store(player);
        }
    }

    QueryByNameAndAssertSingle("Player20");
    QueryByNameAndAssertSingle("Player1234");
    QueryByNameAndAssertSingle("Player4444");
    QueryByNameAndAssertSingle("Player3211");
}

// Opens the last database, runs a Name-equality query for the given value,
// asserts exactly one hit, and prints the elapsed milliseconds.
private static void QueryByNameAndAssertSingle(string name)
{
    var stopwatch = Stopwatch.StartNew();
    using (var odb = OdbFactory.OpenLast())
    {
        var query = odb.Query<Player>();
        query.Descend("Name").Constrain((object) name).Equal();
        var count = query.Execute<Player>().Count;
        Assert.That(count, Is.EqualTo(1));
    }
    stopwatch.Stop();
    Console.WriteLine("Elapsed {0} ms", stopwatch.ElapsedMilliseconds);
}
// Compares the cost of removing 10000 keys from the Linklist collection versus
// the _List collection (Init() repopulates before each run), printing the
// elapsed milliseconds of each.
public void Do()
{
    Console.WriteLine("the linklist:");

    var watch = Stopwatch.StartNew();
    Init();
    for (int key = 0; key < 10000; key++)
    {
        Linklist.Remove(key);
    }
    watch.Stop();
    Console.WriteLine(watch.ElapsedMilliseconds);

    watch.Restart();
    Init();
    for (int key = 0; key < 10000; key++)
    {
        _List.Remove(key);
    }
    watch.Stop();
    Console.WriteLine(watch.ElapsedMilliseconds);

    Console.ReadLine();
}
// Times DayOfYear1 against DayOfYear2 over random month/day pairs, then shows
// that the shared static array can be mutated (unlike a true constant).
public static void Main()
{
    var watch = new Stopwatch();
    var rand = new Random();

    watch.Start();
    for (int i = 0; i < iterations; i++)
        DayOfYear1(rand.Next(1, 13), rand.Next(1, 29));
    watch.Stop();
    Console.WriteLine("Local array: " + watch.Elapsed);

    watch.Restart();
    for (int i = 0; i < iterations; i++)
        DayOfYear2(rand.Next(1, 13), rand.Next(1, 29));
    watch.Stop();
    Console.WriteLine("Static array: " + watch.Elapsed);

    // Trying to modify the static array: the change is visible to all callers.
    daysCumulativeDays[0] = 18;
    foreach (int days in daysCumulativeDays)
    {
        Console.Write("{0}, ", days);
    }
    Console.WriteLine("");
    // MY_STR_CONST = "NOT CONST"; // would not compile: a const cannot be reassigned
}
/// <summary>
/// Sends a single ping synchronously while remaining cancellable via a pipeline stop.
/// </summary>
/// <param name="targetAddress">Address to ping.</param>
/// <param name="timeout">Per-ping timeout in milliseconds.</param>
/// <param name="buffer">Payload bytes to send.</param>
/// <param name="pingOptions">TTL/fragmentation options.</param>
/// <param name="timer">Optional stopwatch: started just before the send, stopped in the finally block.</param>
/// <exception cref="PipelineStoppedException">Thrown when the ping was cancelled by a pipeline stop.</exception>
private PingReply SendCancellablePing(
    IPAddress targetAddress,
    int timeout,
    byte[] buffer,
    PingOptions pingOptions,
    Stopwatch? timer = null)
{
    try
    {
        _sender = new Ping();
        timer?.Start();
        // 'SendPingAsync' always uses the default synchronization context (threadpool).
        // This is what we want to avoid the deadlock resulted by async work being scheduled back to the
        // pipeline thread due to a change of the current synchronization context of the pipeline thread.
        return (_sender.SendPingAsync(targetAddress, timeout, buffer, pingOptions).GetAwaiter().GetResult());
    }
    catch (PingException ex) when (ex.InnerException is TaskCanceledException)
    {
        // The only cancellation we have implemented is on pipeline stops via StopProcessing().
        throw new PipelineStoppedException();
    }
    finally
    {
        // Always stop the timer and release the Ping instance, even on failure.
        timer?.Stop();
        _sender?.Dispose();
        _sender = null;
    }
}
/// <summary>
/// Sends a single ping using the event-based SendAsync API, blocking on a wait
/// handle until OnPingComplete fires, and translating cancellation or errors
/// into exceptions.
/// </summary>
/// <param name="targetAddress">Address to ping.</param>
/// <param name="timeout">Per-ping timeout in milliseconds.</param>
/// <param name="buffer">Payload bytes to send.</param>
/// <param name="pingOptions">TTL/fragmentation options.</param>
/// <param name="timer">Optional stopwatch measuring the send-and-wait span.</param>
/// <exception cref="PipelineStoppedException">Thrown when the ping was cancelled.</exception>
/// <exception cref="PingException">Thrown when no result was produced or the ping errored.</exception>
private PingReply SendCancellablePing(
    IPAddress targetAddress,
    int timeout,
    byte[] buffer,
    PingOptions pingOptions,
    Stopwatch? timer = null)
{
    try
    {
        _sender = new Ping();
        _sender.PingCompleted += OnPingComplete;
        timer?.Start();
        _sender.SendAsync(targetAddress, timeout, buffer, pingOptions, this);
        // Block until OnPingComplete signals the event, then reset it so the
        // next call can wait on it again.
        _pingComplete.Wait();
        timer?.Stop();
        _pingComplete.Reset();

        if (_pingCompleteArgs == null)
        {
            // Completion fired without event args: report an unknown failure.
            throw new PingException(string.Format(
                TestConnectionResources.NoPingResult,
                targetAddress,
                IPStatus.Unknown));
        }

        if (_pingCompleteArgs.Cancelled)
        {
            // The only cancellation we have implemented is on pipeline stops via StopProcessing().
            throw new PipelineStoppedException();
        }

        if (_pingCompleteArgs.Error != null)
        {
            // Surface the underlying transport error as a PingException.
            throw new PingException(_pingCompleteArgs.Error.Message, _pingCompleteArgs.Error);
        }

        return (_pingCompleteArgs.Reply);
    }
    finally
    {
        // Release the Ping instance regardless of outcome.
        _sender?.Dispose();
        _sender = null;
    }
}
// Starts the shared stopwatch and stores the puzzle input with all CRLF
// sequences removed.
public Day18(string input)
{
    stopWatch.Start();
    Input = input.Replace("\r\n", string.Empty);
}
/// <summary>
/// MSBuild task entry point: collects every KB object whose timestamp is on or
/// after DateFrom (defaulting to yesterday when DateFrom is null) and hands
/// them to API.PreProcessPendingObjects. Returns false when no KB is open in
/// the current context or an exception occurs; true otherwise.
/// </summary>
public override bool Execute()
{
    bool isSuccess = true;
    Stopwatch watch = null; // NOTE(review): started below but its elapsed time is never read
    OutputSubscribe();
    IOutputService output = CommonServices.Output;
    output.StartSection("Review objects");
    try
    {
        watch = new Stopwatch();
        watch.Start();
        if (KB == null)
        {
            // No KB open in the current context (message intentionally in Spanish).
            output.AddErrorLine("No hay ninguna KB abierta en el contexto actual, asegúrese de incluír la tarea OpenKnowledgeBase antes de ejecutar la revisión de objetos.");
            isSuccess = false;
        }
        else
        {
            output.AddLine("KBDoctor", DateFrom);
            List<KBObject> objects = new List<KBObject>();
            DateTime dt;
            if (DateFrom != null)
            {
                // Explicit cut-off date supplied in dd-MM-yyyy format.
                dt = DateTime.ParseExact(DateFrom, "dd-MM-yyyy", System.Globalization.CultureInfo.InvariantCulture);
            }
            else
            {
                // Default cut-off: yesterday.
                DateTime ayer = DateTime.Today.AddDays(-1);
                dt = ayer;
            }
            // Collect every object modified on or after the cut-off date.
            foreach (KBObject obj in KB.DesignModel.Objects.GetAll())
            {
                if (DateTime.Compare(obj.Timestamp, dt) >= 0)
                {
                    objects.Add(obj);
                }
            }
            output.AddLine("Review objects from " + dt.ToString());
            List<string[]> lines = new List<string[]>();
            double cant;
            API.PreProcessPendingObjects(KB, output, objects, out lines, out cant);
            lines.Clear();
        }
    }
    catch (Exception e)
    {
        output.AddErrorLine(e.Message);
        isSuccess = false;
    }
    finally
    {
        // Always close the output section and unsubscribe, even on failure.
        output.EndSection("Review objects", isSuccess);
        OutputUnsubscribe();
    }
    return (isSuccess);
}
/// <summary>
/// Initializes a new instance of the <see cref="BenchmarkUnit" /> class.
/// </summary>
/// <param name="identifier">The identifier.</param>
public BenchmarkUnit(string identifier)
{
    _identifier = identifier;
    // Begin timing immediately, if a stopwatch exists for this unit.
    _stopwatch?.Start();
}
// Coroutine-style serializer: converts this JSONObject into a string by
// appending into 'builder', yielding (null) whenever the frame-time budget
// (maxFrameTime, tracked by printWatch) is exceeded so callers can spread the
// work across frames.
// depth  - current recursion depth; serialization aborts past MAX_DEPTH.
// pretty - when true (and PRETTY is defined) emits newlines and tab indents.
IEnumerable StringifyAsync(int depth, StringBuilder builder, bool pretty = false)
{
    //Convert the JSONObject into a string
    //Profiler.BeginSample("JSONprint");
    if (depth++ > MAX_DEPTH)
    {
        // Bail out rather than recurse forever on over-deep structures.
#if UNITY_2 || UNITY_3 || UNITY_4 || UNITY_5 || UNITY_5_4_OR_NEWER
        Debug.Log
#else
        Debug.WriteLine
#endif
            ("reached max depth!");
        yield break;
    }
    // Yield control back to the caller when this frame's budget is used up.
    if (printWatch.Elapsed.TotalSeconds > maxFrameTime)
    {
        printWatch.Reset();
        yield return (null);
        printWatch.Start();
    }
    switch (type)
    {
        case Type.BAKED:
            // Pre-rendered string: emit verbatim.
            builder.Append(str);
            break;
        case Type.STRING:
            builder.AppendFormat("\"{0}\"", str);
            break;
        case Type.NUMBER:
            if (useInt)
            {
                builder.Append(i.ToString());
            }
            else
            {
                // Non-finite values are written with the library's sentinel tokens.
#if USEFLOAT
                if (float.IsInfinity(n))
                {
                    builder.Append(INFINITY);
                }
                else if (float.IsNegativeInfinity(n))
                {
                    builder.Append(NEGINFINITY);
                }
                else if (float.IsNaN(n))
                {
                    builder.Append(NaN);
                }
#else
                if (double.IsInfinity(n))
                {
                    builder.Append(INFINITY);
                }
                else if (double.IsNegativeInfinity(n))
                {
                    builder.Append(NEGINFINITY);
                }
                else if (double.IsNaN(n))
                {
                    builder.Append(NaN);
                }
#endif
                else
                {
                    builder.Append(n.ToString());
                }
            }
            break;
        case Type.OBJECT:
            builder.Append("{");
            if (list.Count > 0)
            {
#if (PRETTY) //for a bit more readability, comment the define above to disable system-wide
                if (pretty)
                {
                    builder.Append(NEWLINE);
                }
#endif
                for (int i = 0; i < list.Count; i++)
                {
                    string key = keys[i];
                    JSONObject obj = list[i];
                    if (obj)
                    {
#if (PRETTY)
                        if (pretty)
                        {
                            for (int j = 0; j < depth; j++)
                            {
                                builder.Append("\t"); //for a bit more readability
                            }
                        }
#endif
                        builder.AppendFormat("\"{0}\":", key);
                        // Recurse into the child, forwarding any frame-budget yields.
                        foreach (IEnumerable e in obj.StringifyAsync(depth, builder, pretty))
                        {
                            yield return (e);
                        }
                        builder.Append(",");
#if (PRETTY)
                        if (pretty)
                        {
                            builder.Append(NEWLINE);
                        }
#endif
                    }
                }
                // Trim the trailing separator emitted after the last element.
                // NOTE(review): the pretty branch assumes NEWLINE is one char - confirm.
#if (PRETTY)
                if (pretty)
                {
                    builder.Length -= 2;
                }
                else
#endif
                    builder.Length--;
            }
#if (PRETTY)
            if (pretty && list.Count > 0)
            {
                builder.Append(NEWLINE);
                for (int j = 0; j < depth - 1; j++)
                {
                    builder.Append("\t"); //for a bit more readability
                }
            }
#endif
            builder.Append("}");
            break;
        case Type.ARRAY:
            builder.Append("[");
            if (list.Count > 0)
            {
#if (PRETTY)
                if (pretty)
                {
                    builder.Append(NEWLINE); //for a bit more readability
                }
#endif
                for (int i = 0; i < list.Count; i++)
                {
                    if (list[i])
                    {
#if (PRETTY)
                        if (pretty)
                        {
                            for (int j = 0; j < depth; j++)
                            {
                                builder.Append("\t"); //for a bit more readability
                            }
                        }
#endif
                        // Recurse into the element, forwarding any frame-budget yields.
                        foreach (IEnumerable e in list[i].StringifyAsync(depth, builder, pretty))
                        {
                            yield return (e);
                        }
                        builder.Append(",");
#if (PRETTY)
                        if (pretty)
                        {
                            builder.Append(NEWLINE); //for a bit more readability
                        }
#endif
                    }
                }
                // Trim the trailing separator emitted after the last element.
#if (PRETTY)
                if (pretty)
                {
                    builder.Length -= 2;
                }
                else
#endif
                    builder.Length--;
            }
#if (PRETTY)
            if (pretty && list.Count > 0)
            {
                builder.Append(NEWLINE);
                for (int j = 0; j < depth - 1; j++)
                {
                    builder.Append("\t"); //for a bit more readability
                }
            }
#endif
            builder.Append("]");
            break;
        case Type.BOOL:
            if (b)
            {
                builder.Append("true");
            }
            else
            {
                builder.Append("false");
            }
            break;
        case Type.NULL:
            builder.Append("null");
            break;
    }
    //Profiler.EndSample();
}
/// <summary>
/// Run the tally for the specified quest.
/// </summary>
/// <param name="quest">The quest to scan.</param>
/// <param name="token">Cancellation token.</param>
/// <exception cref="ArgumentNullException">Thrown when quest is null.</exception>
/// <exception cref="OperationCanceledException">Re-thrown when the tally is cancelled.</exception>
public async Task RunAsync(IQuest quest, CancellationToken token)
{
    if (quest == null)
    {
        throw new ArgumentNullException(nameof(quest));
    }

    try
    {
        TallyIsRunning = true;
        TallyResults = string.Empty;

        // Mark the quest as one that we will listen for changes from.
        // (Unsubscribing first keeps repeated runs from double-subscribing.)
        quest.PropertyChanged -= Quest_PropertyChanged;
        quest.PropertyChanged += Quest_PropertyChanged;

        voteCounter.ResetUserDefinedTasks(quest.DisplayName);

        using (var forumReader = serviceProvider.GetRequiredService<ForumReader>())
        {
            try
            {
                forumReader.StatusChanged += ForumReader_StatusChanged;

                var (threadTitles, posts) = await forumReader.ReadQuestAsync(quest, token).ConfigureAwait(false);

                voteCounter.SetThreadTitles(threadTitles);

                // Time only the post-processing phase, for diagnostics.
                Stopwatch stopwatch = new Stopwatch();
                stopwatch.Start();
                await TallyPosts(posts, quest, token).ConfigureAwait(false);
                stopwatch.Stop();
                logger.LogDebug($"Time to process posts: {stopwatch.ElapsedMilliseconds} ms.");
            }
            finally
            {
                // Always detach the status handler, even when reading fails.
                forumReader.StatusChanged -= ForumReader_StatusChanged;
            }
        }
    }
    catch (InvalidOperationException e)
    {
        // Recoverable: append the message to the results and end this run
        // without updating the displayed results.
        TallyResults += $"\n{e.Message}";
        return;
    }
    catch (OperationCanceledException)
    {
        throw;
    }
    catch (Exception)
    {
        //VoteCounter.Instance.Quest = null;
        throw;
    }
    finally
    {
        TallyIsRunning = false;

        // Free memory used by loading pages as soon as we're done:
        GC.Collect();
    }

    await UpdateResults(token).ConfigureAwait(false);
}
/// <summary>
/// Entry point: initializes DxLib, then runs the fixed-timestep game loop
/// with FPS measurement, optional VSync, and frame skipping.
/// </summary>
static void Main(string[] args)
{
    Thread.CurrentThread.Priority = ThreadPriority.Highest; // raise thread priority up front

    // Enable VSync when the display refresh rate equals the target frame
    // rate; disable it otherwise (we pace frames manually in that case).
    DX.SetWaitVSyncFlag(DX.GetRefreshRate() == TargetFPS ? DX.TRUE : DX.FALSE);

    // Window title
#if DEBUG
    DX.SetWindowText("Surusuri(デバッグ)");
#else
    DX.SetWindowText("Surusuri");
#endif

    DX.SetGraphMode((int)Screen.Size.X, (int)Screen.Size.Y, 32); // set window size (screen resolution)

    // Full-screen or windowed mode
    if (Screen.FullScreen)
    {
        DX.ChangeWindowMode(DX.FALSE); // full-screen
    }
    else
    {
        DX.ChangeWindowMode(DX.TRUE); // windowed
    }

    DX.SetAlwaysRunFlag(DX.TRUE); // keep running even when the window is inactive
    DX.DxLib_Init(); // initialize the DX library
    DX.SetMouseDispFlag(DX.TRUE); // show the mouse cursor (DX.FALSE hides it)
    DX.SetDrawScreen(DX.DX_SCREEN_BACK); // draw to the back buffer (double buffering)

    game = new Game();
    game.Init();
    DX.ScreenFlip();

    stopwatch.Start();

    while (DX.ProcessMessage() == 0) // loop until the window is closed
    {
        // FPS measurement: recompute the average every 60 frames.
        fpsFrameCount++;
        if (fpsFrameCount >= 60)
        {
            long elapsedTicks = stopwatch.Elapsed.Ticks - fpsTicks;
            float elapsedSec = elapsedTicks / 10000000f; // ticks are 100ns, so 1e7 ticks per second
            CurrentFPS = fpsFrameCount / elapsedSec;
            fpsFrameCount = 0;
            fpsTicks = stopwatch.Elapsed.Ticks;
        }

        game.Update();

        if (DX.GetWaitVSyncFlag() == DX.TRUE)
        {
            // VSync paces us; only decide whether to skip drawing.
            if (EnableFrameSkip)
            {
                long waitTicks = nextFrameTicks - stopwatch.Elapsed.Ticks; // time left until the frame deadline
                if (waitTicks < 0) // we are past the target time
                {
                    if (skipCount < MaxAllowSkipCount) // consecutive skips still under the limit
                    {
                        skipCount++; // frame skip (omit the draw)
                    }
                    else
                    {
                        // Max skips exceeded: draw anyway and re-anchor the deadline.
                        // NOTE(review): skipCount is never reset here — presumably Draw() resets it; confirm.
                        nextFrameTicks = stopwatch.Elapsed.Ticks;
                        Draw();
                    }
                }
                else
                {
                    Draw();
                }
                nextFrameTicks += IntervalTicks;
            }
            else
            {
                Draw();
            }
        }
        else
        {
            // No VSync: pace the frame ourselves with Sleep + busy-wait.
            long waitTicks = nextFrameTicks - stopwatch.Elapsed.Ticks; // time left (how long we must wait)
            if (EnableFrameSkip && waitTicks < 0)
            {
                if (skipCount < MaxAllowSkipCount)
                {
                    skipCount++; // frame skip (omit the draw)
                }
                else
                {
                    nextFrameTicks = stopwatch.Elapsed.Ticks;
                    Draw();
                }
            }
            else
            {
                if (waitTicks > 20000) // still need to wait 2+ milliseconds
                {
                    // Sleep() does not return exactly on time, so sleep
                    // (required wait - 2ms) to leave a safety margin.
                    int waitMillsec = (int)(waitTicks / 10000) - 2;
                    Thread.Sleep(waitMillsec);
                }

                // Busy-wait the remainder until the deadline arrives.
                while (stopwatch.Elapsed.Ticks < nextFrameTicks)
                {
                }
                Draw();
            }
            nextFrameTicks += IntervalTicks;
        }
    }

    DX.DxLib_End(); // DX library shutdown
}
/// <summary>
/// The CopyProgressRoutine delegate is an application-defined callback function used with the CopyFileEx and MoveFileWithProgress functions. It is called when a portion of a copy or move operation is completed.
/// </summary>
/// <param name="totalFileSize">Total size of the file, in bytes.</param>
/// <param name="totalBytesTransferred">Total number of bytes transferred from the source file to the destination file since the copy operation began.</param>
/// <param name="streamSize">Total size of the current file stream, in bytes.</param>
/// <param name="streamBytesTransferred">Total number of bytes in the current stream that have been transferred from the source file to the destination file since the copy operation began.</param>
/// <param name="dwStreamNumber">Handle to the current stream. The first time CopyProgressRoutine is called, the stream number is 1.</param>
/// <param name="dwCallbackReason">Reason that CopyProgressRoutine was called.</param>
/// <param name="hSourceFile">Handle to the source file.</param>
/// <param name="hDestinationFile">Handle to the destination file.</param>
/// <param name="lpData">Argument passed to CopyProgressRoutine by the CopyFileEx or MoveFileWithProgress function.</param>
/// <returns> A value indicating how to proceed with the copy operation. </returns>
protected uint CopyProgressCallback(
    long totalFileSize,
    long totalBytesTransferred,
    long streamSize,
    long streamBytesTransferred,
    uint dwStreamNumber,
    uint dwCallbackReason,
    IntPtr hSourceFile,
    IntPtr hDestinationFile,
    IntPtr lpData)
{
    if (_log.IsDebugEnabled)
    {
        _log.DebugFormat("Finished chunk for file ...");
    }

    switch (dwCallbackReason)
    {
        case CallbackChunkFinished:
            // Another part of the file was copied.
            // Capture the time this chunk took, then restart the stopwatch
            // so it measures the next chunk. Restart() == Reset() + Start().
            TimeSpan elapsedTime = _fileChunkStopwatch.Elapsed;
            _fileChunkStopwatch.Restart();

            // Calculate what was copied since the last report.
            long bytesCopiedSinceLastReport = totalBytesTransferred - _currentFile.BytesTransferred;
            _currentFile.BytesTransferred = totalBytesTransferred;

            OnFileProgressChanged(new FileProgressChangedEventArgs(_currentFile.File.FullName, totalFileSize, totalBytesTransferred, elapsedTime));

            // Report a change in transfer rate.
            OnCurrentTransferRateChanged(
                new TransferRateChangedEventArgs(new TransferRateInfo(bytesCopiedSinceLastReport, elapsedTime)));

            // Update the running average transfer rate.
            _averageTransferRate.TransferTime = _averageTransferRate.TransferTime.Add(elapsedTime);
            _averageTransferRate.BytesTransferred += bytesCopiedSinceLastReport;
            OnAverageTransferRateChanged(
                new TransferRateChangedEventArgs(new TransferRateInfo(_averageTransferRate.BytesTransferred, _averageTransferRate.TransferTime)));

            // Report a change in directory progress if we're copying a directory.
            if (_currentDirectory != null)
            {
                _currentDirectory.BytesTransferred += bytesCopiedSinceLastReport;
                OnDirectoryProgressChanged(
                    new DirectoryProgressChangedEventArgs(_currentDirectory.Directory.FullName, _currentDirectory.Size, _currentDirectory.BytesTransferred));
            }

            // Report queue-level progress when a queue is being processed.
            if (_queueInfo != null)
            {
                _queueInfo.QueueBytesTransferred += bytesCopiedSinceLastReport;
                OnQueueProgressChanged(_queueInfo);
            }

            return StopPending ? ProgressCancel : ProgressContinue;

        case CallbackStreamSwitch:
            // A new stream was created. We don't care about this one - just continue the move operation.
            return StopPending ? ProgressCancel : ProgressContinue;

        default:
            return StopPending ? ProgressCancel : ProgressContinue;
    }
}
/// <summary>
/// Runs a single Ethereum blockchain-spec test: builds an in-memory chain
/// stack for the test's network spec, replays the test's RLP-encoded blocks
/// through the blockchain processor, and asserts the resulting state.
/// </summary>
/// <param name="test">The parsed blockchain test case to execute.</param>
/// <param name="stopwatch">Optional stopwatch accumulating block-processing time only.</param>
/// <returns>The pass/fail result for the test.</returns>
protected async Task<EthereumTestResult> RunTest(BlockchainTest test, Stopwatch? stopwatch = null)
{
    TestContext.Write($"Running {test.Name} at {DateTime.UtcNow:HH:mm:ss.ffffff}");
    Assert.IsNull(test.LoadFailure, "test data loading failure");

    IDb stateDb = new MemDb();
    IDb codeDb = new MemDb();

    // Build the spec provider; blockchain tests always start from Frontier at genesis.
    ISpecProvider specProvider;
    if (test.NetworkAfterTransition != null)
    {
        specProvider = new CustomSpecProvider(1,
            (0, Frontier.Instance),
            (1, test.Network),
            (test.TransitionBlockNumber, test.NetworkAfterTransition));
    }
    else
    {
        specProvider = new CustomSpecProvider(1,
            (0, Frontier.Instance), // TODO: this thing took a lot of time to find after it was removed!, genesis block is always initialized with Frontier
            (1, test.Network));
    }

    if (specProvider.GenesisSpec != Frontier.Instance)
    {
        Assert.Fail("Expected genesis spec to be Frontier for blockchain tests");
    }

    DifficultyCalculator.Wrapped = new DifficultyCalculator(specProvider);
    IRewardCalculator rewardCalculator = new RewardCalculator(specProvider);
    IEthereumEcdsa ecdsa = new EthereumEcdsa(specProvider.ChainId, _logManager);

    // Wire up the state/block/tx-pool infrastructure entirely in memory.
    TrieStore trieStore = new(stateDb, _logManager);
    IStateProvider stateProvider = new StateProvider(trieStore, codeDb, _logManager);
    MemDb blockInfoDb = new MemDb();
    IBlockTree blockTree = new BlockTree(new MemDb(), new MemDb(), blockInfoDb, new ChainLevelInfoRepository(blockInfoDb), specProvider, NullBloomStorage.Instance, _logManager);
    ITransactionComparerProvider transactionComparerProvider = new TransactionComparerProvider(specProvider, blockTree);
    ITxPool transactionPool = new TxPool(NullTxStorage.Instance, ecdsa, new ChainHeadInfoProvider(specProvider, blockTree, stateProvider), new TxPoolConfig(), new TxValidator(specProvider.ChainId), _logManager, transactionComparerProvider.GetDefaultComparer());

    IReceiptStorage receiptStorage = NullReceiptStorage.Instance;
    IBlockhashProvider blockhashProvider = new BlockhashProvider(blockTree, _logManager);
    ITxValidator txValidator = new TxValidator(ChainId.Mainnet);
    IHeaderValidator headerValidator = new HeaderValidator(blockTree, Sealer, specProvider, _logManager);
    IOmmersValidator ommersValidator = new OmmersValidator(blockTree, headerValidator, _logManager);
    IBlockValidator blockValidator = new BlockValidator(txValidator, headerValidator, ommersValidator, specProvider, _logManager);
    IStorageProvider storageProvider = new StorageProvider(trieStore, stateProvider, _logManager);
    IVirtualMachine virtualMachine = new VirtualMachine(
        stateProvider,
        storageProvider,
        blockhashProvider,
        specProvider,
        _logManager);

    IBlockProcessor blockProcessor = new BlockProcessor(
        specProvider,
        blockValidator,
        rewardCalculator,
        new TransactionProcessor(
            specProvider,
            stateProvider,
            storageProvider,
            virtualMachine,
            _logManager),
        stateProvider,
        storageProvider,
        transactionPool,
        receiptStorage,
        NullWitnessCollector.Instance,
        _logManager);

    IBlockchainProcessor blockchainProcessor = new BlockchainProcessor(
        blockTree,
        blockProcessor,
        new RecoverSignatures(ecdsa, NullTxPool.Instance, specProvider, _logManager),
        _logManager,
        BlockchainProcessor.Options.NoReceipts);

    InitializeTestState(test, stateProvider, storageProvider, specProvider);

    // Decode every test block from its RLP; blocks that fail to decode are
    // expected to be invalid and are simply skipped.
    List<(Block Block, string ExpectedException)> correctRlp = new();
    for (int i = 0; i < test.Blocks.Length; i++)
    {
        try
        {
            TestBlockJson testBlockJson = test.Blocks[i];
            var rlpContext = Bytes.FromHexString(testBlockJson.Rlp).AsRlpStream();
            Block suggestedBlock = Rlp.Decode<Block>(rlpContext);
            suggestedBlock.Header.SealEngineType = test.SealEngineUsed ? SealEngineType.Ethash : SealEngineType.None;

            Assert.AreEqual(new Keccak(testBlockJson.BlockHeader.Hash), suggestedBlock.Header.Hash, "hash of the block");
            for (int ommerIndex = 0; ommerIndex < suggestedBlock.Ommers.Length; ommerIndex++)
            {
                Assert.AreEqual(new Keccak(testBlockJson.UncleHeaders[ommerIndex].Hash), suggestedBlock.Ommers[ommerIndex].Hash, "hash of the ommer");
            }

            correctRlp.Add((suggestedBlock, testBlockJson.ExpectedException));
        }
        catch (Exception)
        {
            _logger?.Info($"Invalid RLP ({i})");
        }
    }

    // With no decodable blocks, the test result depends only on the genesis header.
    if (correctRlp.Count == 0)
    {
        EthereumTestResult result;
        if (test.GenesisBlockHeader is null)
        {
            result = new EthereumTestResult(test.Name, "Genesis block header missing in the test spec.");
        }
        else if (!new Keccak(test.GenesisBlockHeader.Hash).Equals(test.LastBlockHash))
        {
            result = new EthereumTestResult(test.Name, "Genesis hash mismatch");
        }
        else
        {
            result = new EthereumTestResult(test.Name, null, true);
        }

        return result;
    }

    if (test.GenesisRlp == null)
    {
        test.GenesisRlp = Rlp.Encode(new Block(JsonToEthereumTest.Convert(test.GenesisBlockHeader)));
    }

    Block genesisBlock = Rlp.Decode<Block>(test.GenesisRlp.Bytes);
    Assert.AreEqual(new Keccak(test.GenesisBlockHeader.Hash), genesisBlock.Header.Hash, "genesis header hash");

    // Wait until genesis is fully processed before suggesting further blocks.
    ManualResetEvent genesisProcessed = new(false);
    blockTree.NewHeadBlock += (_, args) =>
    {
        if (args.Block.Number == 0)
        {
            Assert.AreEqual(genesisBlock.Header.StateRoot, stateProvider.StateRoot, "genesis state root");
            genesisProcessed.Set();
        }
    };

    blockchainProcessor.Start();
    blockTree.SuggestBlock(genesisBlock);
    genesisProcessed.WaitOne();

    // Replay the decoded blocks; the stopwatch (if any) accumulates only this phase.
    for (int i = 0; i < correctRlp.Count; i++)
    {
        stopwatch?.Start();
        try
        {
            if (correctRlp[i].ExpectedException != null)
            {
                // BUG FIX: was `_logger.Info` — everywhere else _logger is
                // accessed with `?.`, so a null logger would have thrown here.
                _logger?.Info($"Expecting block exception: {correctRlp[i].ExpectedException}");
            }

            if (correctRlp[i].Block.Hash == null)
            {
                throw new Exception($"null hash in {test.Name} block {i}");
            }

            // TODO: mimic the actual behaviour where block goes through validating sync manager?
            if (!test.SealEngineUsed || blockValidator.ValidateSuggestedBlock(correctRlp[i].Block))
            {
                blockTree.SuggestBlock(correctRlp[i].Block);
            }
            else
            {
                Console.WriteLine("Invalid block");
            }
        }
        catch (InvalidBlockException)
        {
            // Invalid blocks are an expected part of these tests.
        }
        catch (Exception ex)
        {
            _logger?.Info(ex.ToString());
        }
    }

    await blockchainProcessor.StopAsync(true);
    stopwatch?.Stop();

    List<string> differences = RunAssertions(test, blockTree.RetrieveHeadBlock(), storageProvider, stateProvider);
    Assert.Zero(differences.Count, "differences");

    return new EthereumTestResult
    (
        test.Name,
        null,
        differences.Count == 0
    );
}
/// <summary>
/// Starts the internal Stopwatch so it begins accumulating elapsed time.
/// </summary>
public void StartWatch()
{
    watch.Start(); // begin measuring elapsed time
}
/// <summary>
/// Entry point for the Metadata tool: parses command-line options, loads
/// metadata tables (or reflection assemblies), and dispatches to the
/// requested mode (fuzz, test, dump, or enumeration benchmarks).
/// </summary>
static void Main(string[] args)
{
    List<string> argList = new List<string>(args);

    if (argList.Remove("/?"))
    {
        Console.WriteLine("Usage: Metadata.exe [options] [assembly list|*]");
        Console.WriteLine();
        Console.WriteLine("options:");
        Console.WriteLine("  /ref            use reflection; implies /load");
        Console.WriteLine("  /load           load assemblies");
        Console.WriteLine("  /ep             compares enumerating properties and events vs. methods and fields");
        Console.WriteLine("  /t              test all APIs, no output, catch any expected exception");
        Console.WriteLine("  /f              simple fuzzing test");
        Console.WriteLine("  /u              unit tests");
        Console.WriteLine("  /d [options]    dump");
        Console.WriteLine("     n            ... namespace tree (default)");
        Console.WriteLine("     p            ... PE file headers");
        Console.WriteLine("     o            ... module metadata");
        Console.WriteLine("     s            ... statistics");
        Console.WriteLine("     e            ... extension methods");
        Console.WriteLine("     t            ... nested types");
        Console.WriteLine("     m            ... members");
        Console.WriteLine("     d            ... member details");
        Console.WriteLine("  /expect <path>  compares /d output with a content of a file");
        Console.WriteLine();
        Console.WriteLine("assembly list");
        Console.WriteLine("  -empty-         current mscorlib.dll, System.Core.dll, System.dll");
        Console.WriteLine("  *               about 100 assemblies from the current .NET Framework");
        Console.WriteLine("  file1 file2 ... specified files");
        return;
    }

    // options:
    bool test = argList.Remove("/t");
    bool fuzz = argList.Remove("/f");
    bool unitTests = argList.Remove("/u");
    bool useReflection = argList.Remove("/ref");
    bool loadAssemblies = argList.Remove("/load");
    bool enumProperties = argList.Remove("/ep");

    // Reject incompatible option combinations early.
    if (fuzz && (test || enumProperties || useReflection))
    {
        Console.Error.WriteLine("Can't use /f with /t, /ref or /ep");
        return;
    }

    if (test && useReflection)
    {
        Console.Error.WriteLine("Can't use /ref with /t");
        return;
    }

    if (enumProperties && useReflection)
    {
        Console.Error.WriteLine("Can't use /ref with /ep");
        return;
    }

    if (useReflection)
    {
        loadAssemblies = true;
    }

    if (unitTests)
    {
        UnitTests.Run();
    }

    // /d takes an optional options string immediately after it; default "n".
    string dumpOptions;
    int dumpIdx = argList.IndexOf("/d");
    if (dumpIdx >= 0)
    {
        if (dumpIdx + 1 < argList.Count)
        {
            dumpOptions = argList[dumpIdx + 1];
            argList.RemoveAt(dumpIdx + 1);
        }
        else
        {
            dumpOptions = "n";
        }
        argList.RemoveAt(dumpIdx);
    }
    else
    {
        dumpOptions = null;
    }

    // /expect requires a following file path.
    string expectedOutputFile;
    int expectedIdx = argList.IndexOf("/expect");
    if (expectedIdx >= 0)
    {
        if (expectedIdx + 1 < argList.Count)
        {
            expectedOutputFile = argList[expectedIdx + 1];
            argList.RemoveAt(expectedIdx + 1);
        }
        else
        {
            // BUG FIX: the message previously said "/expected", but the
            // actual option is "/expect".
            Console.Error.WriteLine("/expect requires a file");
            return;
        }
        argList.RemoveAt(expectedIdx);
    }
    else
    {
        expectedOutputFile = null;
    }

    bool allFwAssemblies = argList.Remove("*");

    // assemblies:
    // Note: assemblyFiles aliases argList, so defaults added below are seen
    // through the deferred LINQ enumeration.
    IEnumerable<string> assemblyFiles = argList;
    if (allFwAssemblies)
    {
        string fwDir = Path.GetDirectoryName(typeof(object).Assembly.Location);
        assemblyFiles = assemblyFiles.Concat(
            from fileName in AssemblyList.FwAssemblies
            select Path.Combine(fwDir, fileName)
        );
    }
    else if (argList.Count == 0)
    {
        argList.Add(typeof(object).Assembly.Location);
        argList.Add(typeof(Expression).Assembly.Location);
        argList.Add(typeof(Regex).Assembly.Location);
    }

    // tables:
    List<MetadataTables> tables;
    IEnumerable<Assembly> assemblies;
    if (loadAssemblies)
    {
        assemblies = LoadAssemblies(assemblyFiles);

        if (useReflection)
        {
            tables = null;
        }
        else
        {
            Stopwatch swTableLoad = new Stopwatch();
            swTableLoad.Start();
            tables = new List<MetadataTables>(from assembly in assemblies
                                             from module in assembly.GetModules(false)
                                             select module.GetMetadataTables());
            swTableLoad.Stop();
            _output.WriteLine("{0} modules loaded in {1}ms", tables.Count, swTableLoad.ElapsedMilliseconds);
        }
    }
    else
    {
        assemblies = null;

        Stopwatch swTableLoad = new Stopwatch();
        tables = new List<MetadataTables>();
        foreach (var file in assemblyFiles)
        {
            try
            {
                // Only time the actual open, not the error handling.
                swTableLoad.Start();
                tables.Add(MetadataTables.OpenFile(file));
                swTableLoad.Stop();
            }
            catch (FileNotFoundException)
            {
                _output.WriteLine("File {0} doesn't exist.", file);
            }
            catch (BadImageFormatException)
            {
                _output.WriteLine("{0} is not a valid PE file", file);
            }
        }

        if (dumpOptions == null)
        {
            _output.WriteLine("Metadata tables ({0}) loaded in {1}ms", tables.Count, swTableLoad.ElapsedMilliseconds);
        }
    }

    if (fuzz)
    {
        FuzzTables(tables);
        return;
    }

    if (test)
    {
        Test(tables);
        return;
    }

    if (dumpOptions != null)
    {
        string tempDumpFile = "dump.txt";
        bool success = false;

        // When comparing against an expected file, redirect the dump to a temp file.
        if (expectedOutputFile != null)
        {
            _output = new StreamWriter(tempDumpFile, false, new UTF8Encoding(true, false), 0x400);
        }

        try
        {
            try
            {
                DumpTables(
                    tables,
                    dumpPE: dumpOptions.IndexOf('p') >= 0,
                    dumpModule: dumpOptions.IndexOf('o') >= 0,
                    dumpStatistics: dumpOptions.IndexOf('s') >= 0,
                    dumpNamespaces: dumpOptions.IndexOf('n') >= 0,
                    dumpExtensionMethods: dumpOptions.IndexOf('e') >= 0,
                    dumpNestedTypes: dumpOptions.IndexOf('t') >= 0,
                    dumpMembers: dumpOptions.IndexOf('m') >= 0,
                    dumpMemberDetails: dumpOptions.IndexOf('d') >= 0
                );
            }
            finally
            {
                if (expectedOutputFile != null)
                {
                    _output.Close();
                }
            }

            if (expectedOutputFile != null)
            {
                // TODO: extract zip and compare
            }

            success = true;
        }
        finally
        {
            // Remove the temp dump only on success so failures leave evidence behind.
            if (success && _output != null)
            {
                File.Delete(tempDumpFile);
            }
        }
        return;
    }

    if (enumProperties)
    {
        Measure(() => EnumerateProperties(tables));
        return;
    }

    if (assemblies != null)
    {
        if (useReflection)
        {
            Measure(() => ReflectionEnumerate(assemblies));
        }
        else
        {
            Measure(() => Enumerate(tables));
        }
    }
    else
    {
        Measure(() => Enumerate(tables));
    }

    if (Counter_Files > 0)
    {
        _output.WriteLine("Enumerated {0} extension methods and {1} types.", Counter_ExtensionMethods, Counter_Types);
        _output.WriteLine("PropertyMap and EventMap sizes: max {0}, avg {1} per file.", Counter_MaxMapTablesSize, Counter_TotalMapTablesSize / Counter_Files);
    }
}
//you are advised not to set more than 3 parallel threads if you are doing this on your laptop, otherwise the laptop will not survive
/// <summary>
/// Entry point: builds random Hearthstone decks and runs test games across
/// parallel slots, collecting a win-rate/time summary for each slot.
/// </summary>
private static void Main(string[] args)
{
    Console.WriteLine("Starting test setup. v6.7: run in parallel " + parallelThreads + "x and in each parallel, no of tasks:" + testsInEachThread + " and inner parallel:" + parallelThreadsInner + " and each within inner parallel, inner tasks:" + testsInEachThreadInner + " times, different decks, get winrates and time avg of each and print max depth =" + maxDepth + " , max width = " + maxWidth + "");

    // Helper for creating/mutating decks without touching the underlying engine code.
    Sabbertest2.CreateAndMutate createMutateObj = new CreateAndMutate();

    // Important: must be done before anything else — loads all valid
    // Hearthstone cards (1300+) from the data file.
    Dictionary<int, string> allCards = getAllCards();

    // Results are written from Parallel.For bodies; Dictionary is not
    // thread-safe, so all access is guarded by this gate.
    Dictionary<int, string> results = new Dictionary<int, string>();
    object resultsGate = new object();

    // Randomly created deck from all Hearthstone cards.
    List<Card> playerDeck = createMutateObj.createRandomDeck(allCards, cardname);
    List<Card> playerDeck2 = Decks.MidrangeJadeShaman;

    stopwatch.Start();

    Parallel.For(0, parallelThreads, j =>
    {
        bool alreadyDone;
        lock (resultsGate)
        {
            alreadyDone = results.ContainsKey(j);
        }

        if (!alreadyDone)
        {
            Console.WriteLine("outer i, deck number=" + j);
            Console.WriteLine("Printing Deck player 1, loop is here=" + j);
            createMutateObj.print(playerDeck);
            Console.WriteLine("Printing Deck player 2 loop is here=" + j);
            // BUG FIX: player 2's deck was printed as playerDeck (player 1's
            // deck) instead of playerDeck2.
            createMutateObj.print(playerDeck2);

            string winRate_timeMean = getWinRateTimeMean(playerDeck, j, playerDeck2);
            lock (resultsGate)
            {
                results.Add(j, winRate_timeMean);
            }
            Console.WriteLine(winRate_timeMean);
        }

        // NOTE(review): playerDeck is shared across all parallel iterations
        // and reassigned here, so which deck a given game uses is racy —
        // confirm whether per-slot decks were intended.
        playerDeck = createMutateObj.createRandomDeck(allCards, cardname);
    });

    foreach (int key in results.Keys)
    {
        Console.WriteLine("Game " + key + " : " + results[key] + "\n");
    }

    Console.WriteLine("Before Mutation Victory Decks:");

    stopwatch.Stop();
    TimeSpan tempeForOverall = TimeSpan.FromSeconds(stopwatch.ElapsedMilliseconds / 1000);
    Console.WriteLine("Overall time taken:" + tempeForOverall.ToString());

    Console.ReadLine();
}
/// <summary>
/// Console entry point: loads a DeepSpeech model (and optional external
/// scorer), then runs speech-to-text inference over a WAV file.
/// Arguments: --model, --scorer, --audio, --extended.
/// </summary>
static void Main(string[] args)
{
    string model = null;
    string scorer = null;
    string audio = null;
    bool extended = false;
    if (args.Length > 0)
    {
        model = GetArgument(args, "--model");
        scorer = GetArgument(args, "--scorer");
        audio = GetArgument(args, "--audio");
        extended = !string.IsNullOrWhiteSpace(GetArgument(args, "--extended"));
    }

    Stopwatch stopwatch = new Stopwatch();
    try
    {
        Console.WriteLine("Loading model...");
        stopwatch.Start();
        // sphinx-doc: csharp_ref_model_start
        using (IDeepSpeech sttClient = new DeepSpeech(model ?? "output_graph.pbmm"))
        {
            // sphinx-doc: csharp_ref_model_stop
            stopwatch.Stop();
            // BUG FIX: Elapsed.Milliseconds is only the millisecond component
            // (0-999); ElapsedMilliseconds is the total elapsed time.
            Console.WriteLine($"Model loaded - {stopwatch.ElapsedMilliseconds} ms");
            stopwatch.Reset();

            if (scorer != null)
            {
                Console.WriteLine("Loading scorer...");
                // scorer is known non-null here; the old `scorer ?? "kenlm.scorer"` fallback was dead code.
                sttClient.EnableExternalScorer(scorer);
            }

            string audioFile = audio ?? "arctic_a0024.wav";
            var waveBuffer = new WaveBuffer(File.ReadAllBytes(audioFile));
            using (var waveInfo = new WaveFileReader(audioFile))
            {
                Console.WriteLine("Running inference....");
                stopwatch.Start();

                string speechResult;
                // sphinx-doc: csharp_ref_inference_start
                if (extended)
                {
                    Metadata metaResult = sttClient.SpeechToTextWithMetadata(waveBuffer.ShortBuffer,
                        Convert.ToUInt32(waveBuffer.MaxSize / 2), 1);
                    speechResult = MetadataToString(metaResult.Transcripts[0]);
                }
                else
                {
                    speechResult = sttClient.SpeechToText(waveBuffer.ShortBuffer,
                        Convert.ToUInt32(waveBuffer.MaxSize / 2));
                }
                // sphinx-doc: csharp_ref_inference_stop

                stopwatch.Stop();

                Console.WriteLine($"Audio duration: {waveInfo.TotalTime.ToString()}");
                Console.WriteLine($"Inference took: {stopwatch.Elapsed.ToString()}");
                Console.WriteLine((extended ? $"Extended result: " : "Recognized text: ") + speechResult);
            }
            waveBuffer.Clear();
        }
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
}
/// <summary>
/// Benchmark: floods the server with WriteEvents requests from multiple TCP
/// clients/threads, tracks per-outcome counters, and reports throughput and
/// failure-rate metrics.
/// </summary>
/// <param name="context">Command processor context used for client creation, logging, and success/fail reporting.</param>
/// <param name="clientsCnt">Number of TCP connections (one sender thread each).</param>
/// <param name="requestsCnt">Total number of write requests, split across clients.</param>
/// <param name="streamsCnt">Number of distinct target streams (random GUID names).</param>
/// <param name="size">Size in bytes of the '*' payload placed in each event's data.</param>
private void WriteFlood(CommandProcessorContext context, int clientsCnt, long requestsCnt, int streamsCnt, int size)
{
    context.IsAsync();

    var doneEvent = new ManualResetEventSlim(false);
    var clients = new List<TcpTypedConnection<byte[]>>();
    var threads = new List<Thread>();

    // Shared counters; updated only via Interlocked from the receive callbacks.
    long succ = 0;
    long fail = 0;
    long prepTimeout = 0;
    long commitTimeout = 0;
    long forwardTimeout = 0;
    long wrongExpVersion = 0;
    long streamDeleted = 0;
    long all = 0;

    var streams = Enumerable.Range(0, streamsCnt).Select(x => Guid.NewGuid().ToString()).ToArray();
    //var streams = Enumerable.Range(0, streamsCnt).Select(x => string.Format("stream-{0}", x)).ToArray();

    // sw2 measures per-100k-write windows for the periodic throughput trace.
    var sw2 = new Stopwatch();
    for (int i = 0; i < clientsCnt; i++)
    {
        // The last client picks up the remainder so the total is exactly requestsCnt.
        var count = requestsCnt / clientsCnt + ((i == clientsCnt - 1) ? requestsCnt % clientsCnt : 0);

        long sent = 0;
        long received = 0;
        var rnd = new Random();
        var client = context.Client.CreateTcpConnection(
            context,
            (conn, pkg) =>
            {
                if (pkg.Command != TcpCommand.WriteEventsCompleted)
                {
                    context.Fail(reason: string.Format("Unexpected TCP package: {0}.", pkg.Command));
                    return;
                }

                // Bucket the completion by its result code.
                var dto = pkg.Data.Deserialize<TcpClientMessageDto.WriteEventsCompleted>();
                switch (dto.Result)
                {
                    case TcpClientMessageDto.OperationResult.Success:
                        if (Interlocked.Increment(ref succ) % 1000 == 0)
                        {
                            Console.Write('.'); // progress tick per 1000 successes
                        }
                        break;
                    case TcpClientMessageDto.OperationResult.PrepareTimeout:
                        Interlocked.Increment(ref prepTimeout);
                        break;
                    case TcpClientMessageDto.OperationResult.CommitTimeout:
                        Interlocked.Increment(ref commitTimeout);
                        break;
                    case TcpClientMessageDto.OperationResult.ForwardTimeout:
                        Interlocked.Increment(ref forwardTimeout);
                        break;
                    case TcpClientMessageDto.OperationResult.WrongExpectedVersion:
                        Interlocked.Increment(ref wrongExpVersion);
                        break;
                    case TcpClientMessageDto.OperationResult.StreamDeleted:
                        Interlocked.Increment(ref streamDeleted);
                        break;
                    default:
                        throw new ArgumentOutOfRangeException();
                }

                if (dto.Result != TcpClientMessageDto.OperationResult.Success)
                {
                    if (Interlocked.Increment(ref fail) % 1000 == 0)
                    {
                        Console.Write('#'); // progress tick per 1000 failures
                    }
                }

                Interlocked.Increment(ref received);
                var localAll = Interlocked.Increment(ref all);

                // Periodic throughput trace every 100k completions.
                if (localAll % 100000 == 0)
                {
                    var elapsed = sw2.Elapsed;
                    sw2.Restart();
                    context.Log.Trace("\nDONE TOTAL {0} WRITES IN {1} ({2:0.0}/s) [S:{3}, F:{4} (WEV:{5}, P:{6}, C:{7}, F:{8}, D:{9})].",
                        localAll, elapsed, 1000.0 * 100000 / elapsed.TotalMilliseconds,
                        succ, fail, wrongExpVersion, prepTimeout, commitTimeout, forwardTimeout, streamDeleted);
                }

                // Last completion releases the main thread.
                if (localAll == requestsCnt)
                {
                    context.Success();
                    doneEvent.Set();
                }
            },
            connectionClosed: (conn, err) => context.Fail(reason: "Connection was closed prematurely."));
        clients.Add(client);

        // One background sender thread per client.
        threads.Add(new Thread(() =>
        {
            for (int j = 0; j < count; ++j)
            {
                var write = new TcpClientMessageDto.WriteEvents(
                    streams[rnd.Next(streamsCnt)],
                    ExpectedVersion.Any,
                    new[]
                    {
                        new TcpClientMessageDto.NewEvent(Guid.NewGuid().ToByteArray(),
                            "TakeSomeSpaceEvent", 1, 0,
                            Common.Utils.Helper.UTF8NoBom.GetBytes("{ \"DATA\" : \"" + new string('*', size) + "\"}"),
                            Common.Utils.Helper.UTF8NoBom.GetBytes("{ \"METADATA\" : \"" + new string('$', 100) + "\"}"))
                    },
                    false);
                var package = new TcpPackage(TcpCommand.WriteEvents, Guid.NewGuid(), write.Serialize());
                client.EnqueueSend(package.AsByteArray());

                // Simple flow control: keep in-flight requests per client
                // within its share of the write window.
                var localSent = Interlocked.Increment(ref sent);
                while (localSent - Interlocked.Read(ref received) > context.Client.Options.WriteWindow / clientsCnt)
                {
                    Thread.Sleep(1);
                }
            }
        })
        {
            IsBackground = true
        });
    }

    // sw measures the whole run; sw2 the per-window rate.
    var sw = Stopwatch.StartNew();
    sw2.Start();
    threads.ForEach(thread => thread.Start());
    doneEvent.Wait();
    sw.Stop();

    clients.ForEach(client => client.Close());

    context.Log.Info("Completed. Successes: {0}, failures: {1} (WRONG VERSION: {2}, P: {3}, C: {4}, F: {5}, D: {6})",
        succ, fail, wrongExpVersion, prepTimeout, commitTimeout, forwardTimeout, streamDeleted);

    var reqPerSec = (all + 0.0) / sw.ElapsedMilliseconds * 1000;
    context.Log.Info("{0} requests completed in {1}ms ({2:0.00} reqs per sec).", all, sw.ElapsedMilliseconds, reqPerSec);

    PerfUtils.LogData(
        Keyword,
        PerfUtils.Row(PerfUtils.Col("clientsCnt", clientsCnt),
            PerfUtils.Col("requestsCnt", requestsCnt),
            PerfUtils.Col("ElapsedMilliseconds", sw.ElapsedMilliseconds)),
        PerfUtils.Row(PerfUtils.Col("successes", succ), PerfUtils.Col("failures", fail)));

    var failuresRate = (int)(100 * fail / (fail + succ));
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-{1}-{2}-reqPerSec", Keyword, clientsCnt, requestsCnt), (int)reqPerSec);
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-{1}-{2}-failureSuccessRate", Keyword, clientsCnt, requestsCnt), failuresRate);
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-c{1}-r{2}-st{3}-s{4}-reqPerSec", Keyword, clientsCnt, requestsCnt, streamsCnt, size), (int)reqPerSec);
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-c{1}-r{2}-st{3}-s{4}-failureSuccessRate", Keyword, clientsCnt, requestsCnt, streamsCnt, size), failuresRate);

    if (Interlocked.Read(ref succ) != requestsCnt)
    {
        context.Fail(reason: "There were errors or not all requests completed.");
    }
    else
    {
        context.Success();
    }
}
/// <summary>
/// Loads an MNIST-style binary image/label file pair into memory.
/// </summary>
/// <param name="strImagesFile">Path of the binary image file (expects magic 2051).</param>
/// <param name="strLabelsFile">Path of the binary label file (expects magic 2049).</param>
/// <param name="strExportPath">Directory that is created (if missing) for subsequent export.</param>
/// <returns>A list of (pixel bytes, label) tuples, one entry per item.</returns>
/// <exception cref="Exception">Thrown when a magic number is wrong or the item/label counts differ.</exception>
private List<Tuple<byte[], int>> loadFile(string strImagesFile, string strLabelsFile, string strExportPath)
{
    if (!Directory.Exists(strExportPath))
    {
        Directory.CreateDirectory(strExportPath);
    }

    Stopwatch sw = new Stopwatch();

    reportProgress(0, 0, " loading " + strImagesFile + "...");

    List<Tuple<byte[], int>> rgData = new List<Tuple<byte[], int>>();

    // Stacked 'using' statements replace the old manual try/finally: they
    // also guarantee the image file is closed when opening the label file
    // throws (the previous code leaked it in that case).
    using (BinaryFile image_file = new BinaryFile(strImagesFile))
    using (BinaryFile label_file = new BinaryFile(strLabelsFile))
    {
        // Verify the files
        uint magicImg = image_file.ReadUInt32();
        uint magicLbl = label_file.ReadUInt32();

        if (magicImg != 2051)
        {
            throw new Exception("Incorrect image file magic.");
        }

        if (magicLbl != 2049)
        {
            throw new Exception("Incorrect label file magic.");
        }

        uint num_items = image_file.ReadUInt32();
        uint num_labels = label_file.ReadUInt32();

        if (num_items != num_labels)
        {
            throw new Exception("The number of items must be equal to the number of labels!");
        }

        // Add the data source to the database.
        uint rows = image_file.ReadUInt32();
        uint cols = image_file.ReadUInt32();

        m_nHeight = (int)rows;
        m_nWidth = (int)cols;

        // Storing to database;
        byte[] rgLabel;
        byte[] rgPixels;

        string strAction = "loading";

        reportProgress(0, (int)num_items, " " + strAction + " a total of " + num_items.ToString() + " items.");
        reportProgress(0, (int)num_items, " (with rows: " + rows.ToString() + ", cols: " + cols.ToString() + ")");

        sw.Start();

        for (int i = 0; i < num_items; i++)
        {
            rgPixels = image_file.ReadBytes((int)(rows * cols));
            rgLabel = label_file.ReadBytes(1);

            rgData.Add(new Tuple<byte[], int>(rgPixels, rgLabel[0]));

            // Throttle progress reporting to roughly once per second.
            if (sw.Elapsed.TotalMilliseconds > 1000)
            {
                reportProgress(i, (int)num_items, " " + strAction + " data...");
                sw.Restart();
            }
        }

        reportProgress((int)num_items, (int)num_items, " " + strAction + " completed.");
    }

    return rgData;
}
/// <summary>
/// Resolves a static file; when the requested sub-path matches the image-resize
/// pattern and the resized file does not yet exist, generates a scaled and
/// center-cropped copy of the original image via FreeImage, then resolves again.
/// </summary>
/// <param name="subpath">Request sub-path relative to the provider root.</param>
/// <returns>The file info for the (possibly newly generated) file.</returns>
public new IFileInfo GetFileInfo(string subpath)
{
    var fileInfo = base.GetFileInfo(subpath);
    if (fileInfo.Exists)
    {
        return fileInfo;
    }

    var matchResult = _options.IsMatch(subpath, _options.PathMatch);
    if (!matchResult.Result)
    {
        return fileInfo;
    }

    var imageHandleDto = matchResult.Data;

    // Locate the source image the resized variant should be generated from.
    fileInfo = base.GetFileInfo($"{imageHandleDto.ImagePath}{imageHandleDto.ImageName}.{imageHandleDto.ImageExtensions}");
    if (!fileInfo.Exists)
    {
        return fileInfo;
    }

    var newImagePath = Path.Join(Root.Replace("\\", "/"), subpath).Replace("//", "/");

    // BUG FIX: the original called Start() on the shared stopwatch field; the field is
    // never reset between requests, so every log entry after the first reported the
    // accumulated time of all previous requests. Restart() measures this call only.
    // (Also removed a large body of commented-out Magick.NET/FreeImage benchmarking code.)
    _stopwatch.Restart();

    FREE_IMAGE_FORMAT imageFormat = 0;
    switch (imageHandleDto.ImageExtensions.ToLower())
    {
        case "jpg":
        case "jpeg":
            imageFormat = FREE_IMAGE_FORMAT.FIF_JPEG;
            break;
        case "png":
            imageFormat = FREE_IMAGE_FORMAT.FIF_PNG;
            break;
        case "gif":
            imageFormat = FREE_IMAGE_FORMAT.FIF_GIF;
            break;
        case "ico":
            imageFormat = FREE_IMAGE_FORMAT.FIF_ICO;
            break;
        default:
            imageFormat = FREE_IMAGE_FORMAT.FIF_UNKNOWN;
            break;
    }

    using (var original = FreeImageBitmap.FromFile(fileInfo.PhysicalPath))
    {
        // Scale factor that fits the requested box while preserving aspect ratio.
        var size = original.Width / (double)imageHandleDto.ImageWidth;
        if ((imageHandleDto.ImageHeight * size) > original.Height)
        {
            size = original.Height / (double)imageHandleDto.ImageHeight;
        }

        var width = original.Width / size;
        var height = original.Height / size;
        width = width > original.Width ? original.Width : width;      // never upscale
        height = height > original.Height ? original.Height : height;

        using (var resized = original.GetScaledInstance((int)width, (int)height, FREE_IMAGE_FILTER.FILTER_BICUBIC))
        {
            // Center-crop the scaled image down to the requested dimensions.
            double left, top, right, bottom;
            var halfWidth = Math.Floor(Convert.ToDouble(imageHandleDto.ImageWidth / 2));
            var halfHeight = Math.Floor(Convert.ToDouble(imageHandleDto.ImageHeight / 2));
            var centerX = Math.Round(width / 2);
            var centerY = Math.Round(height / 2);

            if (resized.Width > imageHandleDto.ImageWidth)
            {
                left = centerX - halfWidth;
                right = centerX + halfWidth;
            }
            else
            {
                left = 0;
                right = resized.Width;
            }

            if (resized.Height > imageHandleDto.ImageHeight)
            {
                // NOTE(review): 'bottom' is the smaller coordinate and 'top' the larger,
                // apparently following FreeImage's bottom-up convention — TODO confirm
                // against FreeImageBitmap.Copy's (left, top, right, bottom) parameters.
                bottom = centerY - halfHeight;
                top = centerY + halfHeight;
            }
            else
            {
                bottom = 0;
                top = resized.Height;
            }

            using (var crop = resized.Copy((int)left, (int)top, (int)right, (int)bottom))
            {
                crop.Save(newImagePath, imageFormat, FREE_IMAGE_SAVE_FLAGS.DEFAULT);
            }
        }
    }

    _stopwatch.Stop();
    _logger.LogDebug($"FreeImage FromFilePath耗时{_stopwatch.ElapsedMilliseconds}ms");

    // Resolve the sub-path again — the generated file should now exist.
    fileInfo = base.GetFileInfo(subpath);
    return fileInfo;
}
/// <summary>
/// Executes the given SQL command asynchronously for a single pulled value, logging
/// each phase, then waits for the replica's end-of-package marker file to appear.
/// </summary>
/// <param name="_command">Prepared command; its first parameter's value is used as the pull-value id.</param>
private async void OperateAsyncCommand(SqlCommand _command)
{
    // NOTE(review): async void — callers cannot await this or observe its exceptions;
    // acceptable only if it is invoked fire-and-forget. Confirm call sites.
    var commandLocal = _command;
    var timer = new Stopwatch();
    var token = new CancellationToken(); // NOTE(review): a default token — it can never be cancelled.
    var pullValue = new PullValue();
    var eventArgs = new AsyncLogEventArgs();
    string directionPath = "";
    try
    {
        commandLocal.CommandTimeout = 99999;
        timer.Start();
        tokens.Add(token);
        pullValue.Id = _command.Parameters[0].Value.ToString();
        if (commandLocal.Connection.State == System.Data.ConnectionState.Closed)
        {
            commandLocal.Connection.Open();
        }
        // Append the registered work directory as an extra literal argument, if any.
        if (workDirectories.ContainsKey(pullValue.Id))
        {
            commandLocal.CommandText += ", N'" + workDirectories[pullValue.Id] + "'";
        }
        pullValue.ISError = false;
        pullValue.Status = TaskRunningStatus.Running;
        pullValue.Log = "Начинается обработка значения...";
        FileLogger.GetInstance().WriteLog(pullValue, DateTime.Now, this.RegionID);
        var task = await commandLocal.ExecuteNonQueryAsync(token);
        // NOTE(review): direct indexing can throw KeyNotFoundException when no work
        // directory was registered for this id (unlike the guarded access above) — confirm intended.
        directionPath = workDirectories[pullValue.Id];
        pullValue.Log = "Ожидается обработка реплики...";
        FileLogger.GetInstance().WriteLog(pullValue, DateTime.Now, this.RegionID);
        // Block (with timeout) until the end-of-package marker file shows up.
        var waitTask = WaitForFile(directionPath + "\\gmmq.package.end");
        waitTask.Wait(OperationsAPI.PackageExistWaitTimeOut);
        pullValue.Log = string.Format("Операция успешна");
        pullValue.ISError = false;
        pullValue.Status = TaskRunningStatus.Success;
        FileLogger.GetInstance().WriteLog(pullValue, DateTime.Now, this.RegionID);
    }
    catch (Exception e)
    {
        // On any failure: log the error state and close the connection early.
        pullValue.Log = string.Format("Операция завершилась со следующей ошибкой:{0}", e.Message);
        pullValue.ISError = true;
        pullValue.Status = TaskRunningStatus.Failure;
        FileLogger.GetInstance().WriteLog(pullValue, DateTime.Now, this.RegionID);
        if (commandLocal.Connection.State == System.Data.ConnectionState.Open)
        {
            commandLocal.Connection.Close();
        }
    }
    finally
    {
        timer.Stop();
        // NOTE(review): lock(this) is an anti-pattern (external code can lock the same
        // object); a private lock object would be safer — verify no external locking.
        lock (this)
        {
            --lastElementCount;
        }
        // Elapsed time recorded as minutes:seconds:milliseconds.
        pullValue.Time = string.Format("{0:d2}:{1:d2}:{2:d4}", timer.Elapsed.Minutes, timer.Elapsed.Seconds, timer.Elapsed.Milliseconds);
        logs.Add(pullValue);
        tokens.Remove(token);
        if (commandLocal.Connection.State == System.Data.ConnectionState.Open)
        {
            commandLocal.Connection.Close();
        }
        FinishProcess();
    }
}
/// <summary>
/// Worker-thread entry point that runs every enabled automated test, writing start
/// and result entries to the Windows Application event log and persisting each
/// result to the database.
/// </summary>
/// <param name="obj">A Tuple of (cancel event, skip flag, server-mode flag, GPU id).</param>
private void testThread(object obj)
{
    Tuple<AutoResetEvent, bool, bool, int> param = obj as Tuple<AutoResetEvent, bool, bool, int>;
    AutoResetEvent evtCancel = param.Item1;
    bool bSkip = param.Item2;
    bool bServerMode = param.Item3;
    int nGpuId = param.Item4;
    TestClass tcCurrent = null;
    MethodInfoEx miCurrent = null;

    m_nCurrentTest = 0;
    m_swTiming.Reset();
    m_swTiming.Start();

    string strSrcStart = "MyCaffe Automated Test Start";
    string strSrcResult = "MyCaffe Automated Test Result";
    string strLog = "Application";

    EventLog eventLogStart = new EventLog(strLog);
    eventLogStart.Source = strSrcStart;
    EventLog eventLogResult = new EventLog(strLog);
    eventLogResult.Source = strSrcResult;

    try
    {
        foreach (TestClass tc in m_rgClasses)
        {
            tcCurrent = tc;

            foreach (MethodInfoEx mi in tc.Methods)
            {
                miCurrent = mi;

                // Honor a cancel request between tests.
                if (evtCancel.WaitOne(0))
                {
                    return;
                }

                // In server mode only methods not yet executed are run.
                if (mi.Enabled && (!bServerMode || mi.Status == MethodInfoEx.STATUS.NotExecuted))
                {
                    m_strCurrentTest = tc.Name + "::" + mi.Name;

                    if (bSkip)
                    {
                        mi.ErrorInfo.SetError(new Exception("SKIPPED"));
                        mi.Status = MethodInfoEx.STATUS.Failed;
                    }
                    else
                    {
                        eventLogStart.WriteEntry("Starting " + tc.Name + "::" + mi.Name + " test.");
                        mi.Invoke(tc.Instance, nGpuId);

                        if (mi.Status == MethodInfoEx.STATUS.Failed)
                        {
                            eventLogResult.WriteEntry("ERROR " + tc.Name + "::" + mi.Name + " test - " + mi.Status.ToString() + " Error Information: " + mi.ErrorInfo.FullErrorString, EventLogEntryType.Warning);
                        }
                        else
                        {
                            eventLogResult.WriteEntry("Completed " + tc.Name + "::" + mi.Name + " test - " + mi.Status.ToString(), EventLogEntryType.Information);
                        }
                    }

                    // Aborted tests are intentionally not persisted.
                    if (mi.Status != MethodInfoEx.STATUS.Aborted)
                    {
                        SaveToDatabase(tc, mi);
                    }
                }

                m_nCurrentTest++;
            }
        }
    }
    catch (Exception excpt)
    {
        SaveToDatabase(tcCurrent, miCurrent, excpt);
        // BUG FIX: guard against a null current class (an exception can fire before the
        // first loop iteration assigns tcCurrent), which would otherwise replace the
        // real error with a NullReferenceException.
        if (tcCurrent != null)
        {
            tcCurrent.InvokeDispose();
        }
        eventLogStart.WriteEntry("Test Exception Thrown! " + excpt.Message, EventLogEntryType.Error);
        // BUG FIX: 'throw excpt;' resets the stack trace; a bare 'throw' preserves it.
        throw;
    }
    finally
    {
        m_swTiming.Stop();

        if (OnRunCompleted != null)
        {
            OnRunCompleted(this, new EventArgs());
        }

        eventLogStart.Close();
        eventLogResult.Close();
    }
}
/// <summary>
/// SURF template-matching action: pyramids the input image down, matches it against
/// the pre-computed model descriptors, derives the template's position and angle
/// from the homography, then rotates/translates the input to the configured offset.
/// Sets actionRes to OK on success or NG when no homography can be found.
/// </summary>
public override void ActionExcute()
{
    Stopwatch sw = new Stopwatch();
    sw.Start();
    Mat homography = null;
    Mat mask = null;
    VectorOfKeyPoint observedKeyPoints = new VectorOfKeyPoint();
    VectorOfVectorOfDMatch matches = new VectorOfVectorOfDMatch();
    Image<Gray, byte> image = imageInput.Clone();

    // Downsample 'time' times; result coordinates are scaled back up by 2^time below.
    for (int i = 0; i < actionMatchData.time; i++)
    {
        image = image.PyrDown();
    }

    // Restrict the search to the configured AOI when one is set.
    if (0 != actionMatchData.InputAOIWidth && 0 != actionMatchData.InputAOIHeight)
    {
        image.ROI = new Rectangle(actionMatchData.InputAOIX, actionMatchData.InputAOIY, actionMatchData.InputAOIWidth, actionMatchData.InputAOIHeight);
    }

    PointF center;
    if (null != modelDescriptors)
    {
        UMat b1 = image.ToUMat();
        UMat observedDescriptors = new UMat();
        // Detect keypoints and compute descriptors in one call (OpenCV combines both
        // steps here; they could also be used separately).
        surf.DetectAndCompute(b1, null, observedKeyPoints, observedDescriptors, false);
        BFMatcher matcher = new BFMatcher(DistanceType.L2Sqr);
        // Start matching.
        matcher.Add(modelDescriptors);
        matcher.KnnMatch(observedDescriptors, matches, 2, null);
        mask = new Mat(matches.Size, 1, DepthType.Cv8U, 1);
        mask.SetTo(new MCvScalar(255));
        Features2DToolbox.VoteForUniqueness(matches, 0.8, mask); // Remove duplicate matches.
        int Count = CvInvoke.CountNonZero(mask); // Used to locate the template within the image.

        // At least 4 correspondences are needed to estimate a homography.
        if (Count >= 4)
        {
            Count = Features2DToolbox.VoteForSizeAndOrientation(modelKeyPoints, observedKeyPoints, matches, mask, 1.5, 20);
            if (Count >= 4)
            {
                homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(modelKeyPoints, observedKeyPoints, matches, mask, 2);
            }
        }

        Mat result1 = new Mat();
        // Draw the match relationships for diagnostics.
        Features2DToolbox.DrawMatches(_imageTempleAOI.Convert<Gray, byte>().Mat, modelKeyPoints, image.Convert<Gray, byte>().Mat, observedKeyPoints, matches, result1, new MCvScalar(255, 0, 255), new MCvScalar(0, 255, 255), mask);

        if (homography != null) // The template was found in the image — draw its outline.
        {
            Rectangle rect = new Rectangle(Point.Empty, _imageTempleAOI.Size);
            PointF[] points = new PointF[] { new PointF(rect.Left, rect.Bottom), new PointF(rect.Right, rect.Bottom), new PointF(rect.Right, rect.Top), new PointF(rect.Left, rect.Top) };
            points = CvInvoke.PerspectiveTransform(points, homography);
            Point[] points2 = Array.ConvertAll<PointF, Point>(points, Point.Round);
            VectorOfPoint vp = new VectorOfPoint(points2);
            CvInvoke.Polylines(result1, vp, true, new MCvScalar(255, 0, 0, 255), 15);
            // Centroid of the projected corners, rescaled to the original resolution.
            dResultX = (points[0].X + points[1].X + points[2].X + points[3].X) / 4 * ((float)Math.Pow(2, actionMatchData.time));
            dResultY = (points[0].Y + points[1].Y + points[2].Y + points[3].Y) / 4 * ((float)Math.Pow(2, actionMatchData.time));
            // Midpoints of the left and right edges give the template's orientation.
            Point point1 = new Point(Convert.ToInt32((points[0].X + points[3].X) / 2), Convert.ToInt32((points[0].Y + points[3].Y) / 2));
            Point point2 = new Point(Convert.ToInt32((points[1].X + points[2].X) / 2), Convert.ToInt32((points[1].Y + points[2].Y) / 2));
            CvInvoke.Line(result1, point1, point2, new MCvScalar(255, 0, 0), 1, Emgu.CV.CvEnum.LineType.EightConnected, 0);
            dResultAngle = Math.Atan2((point2.Y - point1.Y), (point2.X - point1.X)) * 180 / Math.PI;
            imageDescript = result1;
        }
        else
        {
            actionRes = ActionResponse.NG;
            return;
        }
        center = new PointF((float)dResultX, (float)dResultY);
    }
    else
    {
        // No model descriptors — fall back to rotating about the image center.
        center = new PointF(imageInput.Width / 2, imageInput.Height / 2);
    }

    // Rotation about the found center, corrected by the configured angle offset.
    Mat rotation = new Mat();
    CvInvoke.GetRotationMatrix2D(center, dResultAngle + actionMatchData.fOffsetAngle, 1, rotation);

    // Build a 2x3 affine translation matrix moving the result to the configured offset.
    Image<Gray, float> mat = new Image<Gray, float>(new Size(3, 2));
    CvInvoke.cvSet2D(mat, 0, 2, new MCvScalar(actionMatchData.fOffsetX - dResultX));
    CvInvoke.cvSet2D(mat, 1, 2, new MCvScalar(actionMatchData.fOffsetY - dResultY));
    CvInvoke.cvSet2D(mat, 0, 0, new MCvScalar(1));
    CvInvoke.cvSet2D(mat, 1, 1, new MCvScalar(1));

    System.Drawing.Size roisize = new Size(imageInput.Bitmap.Width, imageInput.Bitmap.Height);
    try
    {
        if (null == imageResult)
        {
            imageResult = new Image<Gray, byte>(imageInput.Size);
        }
        imageInput.Draw(new CircleF(center, 3), new Gray(255), 3);
        // Apply rotation first, then the translation, both into imageResult.
        CvInvoke.WarpAffine(imageInput, imageResult, rotation, roisize);
        CvInvoke.WarpAffine(imageResult, imageResult, mat, roisize);
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
    actionRes = ActionResponse.OK;
    sw.Stop();
}
/// <summary>
/// Benchmark worker routine: launches the miner process, waits (with a hard time
/// limit and several cancel/abort signals) for it to finish, then parses the miner's
/// latest log file to compute the average benchmark speed.
/// </summary>
/// <param name="commandLine">Command line string passed to the miner process.</param>
protected override void BenchmarkThreadRoutine(object commandLine)
{
    BenchmarkSignalQuit = false;
    BenchmarkSignalHanged = false;
    BenchmarkSignalFinnished = false;
    BenchmarkException = null;

    Thread.Sleep(ConfigManager.GeneralConfig.MinerRestartDelayMS);

    try
    {
        Helpers.ConsolePrint("BENCHMARK", "Benchmark starts");
        Helpers.ConsolePrint(MinerTag(), "Benchmark should end in : " + _benchmarkTimeWait + " seconds");
        BenchmarkHandle = BenchmarkStartProcess((string)commandLine);
        // First wait covers the expected benchmark duration plus a small grace period.
        BenchmarkHandle.WaitForExit(_benchmarkTimeWait + 2);
        var benchmarkTimer = new Stopwatch();
        benchmarkTimer.Reset();
        benchmarkTimer.Start();
        //BenchmarkThreadRoutineStartSettup();
        // wait a little longer then the benchmark routine if exit false throw
        //var timeoutTime = BenchmarkTimeoutInSeconds(BenchmarkTimeInSeconds);
        //var exitSucces = BenchmarkHandle.WaitForExit(timeoutTime * 1000);
        // don't use wait for it breaks everything
        BenchmarkProcessStatus = BenchmarkProcessStatus.Running;
        var keepRunning = true;
        while (keepRunning && IsActiveProcess(BenchmarkHandle.Id))
        {
            //string outdata = BenchmarkHandle.StandardOutput.ReadLine();
            //BenchmarkOutputErrorDataReceivedImpl(outdata);
            // terminate process situations: timeout, user quit, finished, hang, or error
            if (benchmarkTimer.Elapsed.TotalSeconds >= (_benchmarkTimeWait + 2) || BenchmarkSignalQuit || BenchmarkSignalFinnished || BenchmarkSignalHanged || BenchmarkSignalTimedout || BenchmarkException != null)
            {
                var imageName = MinerExeName.Replace(".exe", "");
                // maybe will have to KILL process
                KillMinerBase(imageName);
                if (BenchmarkSignalTimedout)
                {
                    throw new Exception("Benchmark timedout");
                }
                if (BenchmarkException != null)
                {
                    throw BenchmarkException;
                }
                if (BenchmarkSignalQuit)
                {
                    throw new Exception("Termined by user request");
                }
                if (BenchmarkSignalFinnished)
                {
                    break;
                }
                keepRunning = false;
                break;
            }
            // wait a second reduce CPU load
            Thread.Sleep(1000);
        }
    }
    catch (Exception ex)
    {
        BenchmarkThreadRoutineCatch(ex);
    }
    finally
    {
        BenchmarkAlgorithm.BenchmarkSpeed = 0;
        // find latest log file
        // NOTE(review): only the first file matching GetLogFileName() is taken —
        // "latest" holds only if the pattern matches a single file; confirm.
        var latestLogFile = "";
        var dirInfo = new DirectoryInfo(WorkingDirectory);
        foreach (var file in dirInfo.GetFiles(GetLogFileName()))
        {
            latestLogFile = file.Name;
            break;
        }
        // read file log, retrying for up to ~10s in case the miner still holds the file
        if (File.Exists(WorkingDirectory + latestLogFile))
        {
            var lines = new string[0];
            var read = false;
            var iteration = 0;
            while (!read)
            {
                if (iteration < 10)
                {
                    try
                    {
                        lines = File.ReadAllLines(WorkingDirectory + latestLogFile);
                        read = true;
                        Helpers.ConsolePrint(MinerTag(), "Successfully read log after " + iteration + " iterations");
                    }
                    catch (Exception ex)
                    {
                        Helpers.ConsolePrint(MinerTag(), ex.Message);
                        Thread.Sleep(1000);
                    }
                    iteration++;
                }
                else
                {
                    read = true; // Give up after 10s
                    Helpers.ConsolePrint(MinerTag(), "Gave up on iteration " + iteration);
                }
            }
            var addBenchLines = BenchLines.Count == 0;
            // Accumulate every speed line so the average can be computed below.
            foreach (var line in lines)
            {
                if (line != null)
                {
                    BenchLines.Add(line);
                    var lineLowered = line.ToLower();
                    if (lineLowered.Contains(LookForStart))
                    {
                        _benchmarkSum += GetNumber(lineLowered);
                        ++_benchmarkReadCount;
                    }
                }
            }
            if (_benchmarkReadCount > 0)
            {
                BenchmarkAlgorithm.BenchmarkSpeed = _benchmarkSum / _benchmarkReadCount;
            }
        }
        BenchmarkThreadRoutineFinish();
    }
}
/// <summary>
/// Exports CSV data for every discoverable computer to <paramref name="filename"/>,
/// querying computers in parallel on a fixed pool of 50 worker threads fed through a
/// bounded blocking queue, with periodic progress and ETA reporting.
/// </summary>
/// <param name="filename">Destination CSV file; overwritten if it exists.</param>
public void ExportAllComputers(string filename)
{
    DisplayAdvancement("Getting computer list");
    List<string> computers = GetListOfComputerToExplore();
    DisplayAdvancement(computers.Count + " computers to explore");

    int numberOfThread = 50;
    BlockingQueue<string> queue = new BlockingQueue<string>(70);
    Thread[] threads = new Thread[numberOfThread];
    Dictionary<string, string> SIDConvertion = new Dictionary<string, string>();
    int record = 0;

    using (StreamWriter sw = File.CreateText(filename))
    {
        sw.WriteLine(GetCsvHeader());
        try
        {
            // Worker body: drain the queue until Quit() unblocks Dequeue with false.
            ThreadStart threadFunction = () =>
            {
                for (; ;)
                {
                    string computer = null;
                    if (!queue.Dequeue(out computer))
                    {
                        break;
                    }

                    Trace.WriteLine("Working on computer " + computer);
                    // BUG FIX: the stopwatch was constructed but never started, so the
                    // "after <elapsed>" trace in the catch below always reported zero.
                    Stopwatch stopWatch = Stopwatch.StartNew();
                    try
                    {
                        string s = GetCsvData(computer);
                        if (s != null)
                        {
                            lock (_syncRoot)
                            {
                                record++;
                                sw.WriteLine(s);
                                // Flush periodically so a crash loses at most ~20 records.
                                if ((record % 20) == 0)
                                {
                                    sw.Flush();
                                }
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        stopWatch.Stop();
                        Trace.WriteLine("Computer " + computer + " " + ex.Message + " after " + stopWatch.Elapsed);
                    }
                }
            };

            // Consumers
            for (int i = 0; i < numberOfThread; i++)
            {
                threads[i] = new Thread(threadFunction);
                threads[i].Start();
            }

            // Producer: enqueue every computer, reporting progress every 25 items and
            // an estimated time of completion once 1000 items have been processed.
            int j = 0;
            int smallstep = 25;
            int bigstep = 1000;
            DateTime start = DateTime.Now;
            Stopwatch watch = new Stopwatch();
            watch.Start();
            foreach (string computer in computers)
            {
                j++;
                queue.Enqueue(computer);
                if (j % smallstep == 0)
                {
                    string ETCstring = null;
                    if (j > smallstep && (j - smallstep) % bigstep != 0)
                    {
                        ClearCurrentConsoleLine();
                    }
                    if (j > bigstep)
                    {
                        // Linear extrapolation of total run time from progress so far.
                        long totalTime = ((long)(watch.ElapsedMilliseconds * computers.Count) / j);
                        ETCstring = " [ETC:" + start.AddMilliseconds(totalTime).ToLongTimeString() + "]";
                    }
                    DisplayAdvancement(j + " on " + computers.Count + ETCstring);
                }
            }
            queue.Quit();
            Trace.WriteLine("insert computer completed. Waiting for worker thread to complete");
            for (int i = 0; i < numberOfThread; i++)
            {
                threads[i].Join();
            }
            Trace.WriteLine("Done insert file");
        }
        finally
        {
            queue.Quit();
            for (int i = 0; i < numberOfThread; i++)
            {
                if (threads[i] != null)
                {
                    if (threads[i].ThreadState == System.Threading.ThreadState.Running)
                    {
                        // NOTE(review): Thread.Abort is obsolete and throws on .NET Core+;
                        // acceptable only while the project targets .NET Framework.
                        threads[i].Abort();
                    }
                }
            }
        }
        DisplayAdvancement("Done");
    }
}
/// <summary>
/// Runs cyclic curve reading via the DTM menu: starts cyclic reading, polls the
/// curve-counter text field until the requested number of additional curves has
/// been read, and optionally waits (with timeout) for the read/write to finish.
/// </summary>
/// <param name="numberOfCurves">Number of curves to read</param>
/// <param name="waitUntilFinished">Configure if should be waited until a curve is read or not</param>
/// <returns>true: if execution worked, false: if an error occurred</returns>
public bool RunViaMenu(int numberOfCurves, bool waitUntilFinished)
{
    try
    {
        // 2013-06-26 - EC - Changed Type Text to Element
        Element text = (new DiagramElements()).CurveDataNumber;
        if (new IsDTMConnected().Run())
        {
            if ((new RunCyclicRead()).ViaMenu())
            {
                Log.Info(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Cyclic reading started");
                if (text != null)
                {
                    // The counter field reads "<count>/<total>"; everything before the
                    // separator is the number of curves read so far.
                    const string Separator = "/";
                    string curvesIni = text.GetAttributeValueText("WindowText");
                    string curvesNew = text.GetAttributeValueText("WindowText");
                    int separatorPositionIni = -1;
                    int curvesIniCount = -1;
                    int separatorPositionNew = -1;
                    int curvesNewCount = -1;
                    if (curvesIni != null)
                    {
                        separatorPositionIni = curvesIni.IndexOf(Separator, StringComparison.Ordinal);
                        curvesIni = curvesIni.Substring(0, separatorPositionIni);
                        curvesIniCount = Convert.ToInt16(curvesIni);
                    }
                    if (curvesNew != null)
                    {
                        separatorPositionNew = curvesNew.IndexOf(Separator, StringComparison.Ordinal);
                        curvesNew = curvesNew.Substring(0, separatorPositionNew);
                        curvesNewCount = Convert.ToInt16(curvesNew);
                    }
                    // Poll until the counter has advanced by numberOfCurves.
                    // NOTE(review): this loop has no timeout — it spins forever if the
                    // counter stalls; confirm this is intended.
                    while (curvesNewCount < (curvesIniCount + numberOfCurves))
                    {
                        string curveReminder = curvesNew;
                        curvesNew = text.GetAttributeValueText("WindowText");
                        if (curvesNew != null)
                        {
                            separatorPositionNew = curvesNew.IndexOf(Separator, StringComparison.Ordinal);
                            curvesNew = curvesNew.Substring(0, separatorPositionNew);
                            curvesNewCount = Convert.ToInt16(curvesNew);
                        }
                        else
                        {
                            // Keep the last good value if the field briefly reads null.
                            curvesNew = curveReminder;
                        }
                    }
                    if (waitUntilFinished)
                    {
                        if ((new RunEndReadWrite()).ViaMenu())
                        {
                            // Wait for the reading flag to clear, bounded by GeneralTimeout.
                            var watch = new Stopwatch();
                            watch.Start();
                            while ((new ReadAndWrite()).IsReading())
                            {
                                if (watch.ElapsedMilliseconds <= DefaultValues.GeneralTimeout)
                                {
                                    continue;
                                }
                                Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Cyclic reading did not finish within " + DefaultValues.GeneralTimeout + " milliseconds");
                                watch.Stop();
                                return(false);
                            }
                            watch.Stop();
                            Log.Info(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Cyclic reading finished after " + watch.ElapsedMilliseconds + " milliseconds. (Timeout: " + DefaultValues.GeneralTimeout + " milliseconds)");
                            return(true);
                        }
                        Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Function [RunEndReadWrite.ViaMenu] was not Executiond.");
                        return(false);
                    }
                    Log.Info(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Cyclic reading started");
                    return(true);
                }
                Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Textfield is not accessable.");
                return(false);
            }
            Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Function [RunCyclicReading.ViaMenu] was not Executiond.");
            return(false);
        }
        Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), "Module is not online.");
        return(false);
    }
    catch (Exception exception)
    {
        Log.Error(LogInfo.Namespace(MethodBase.GetCurrentMethod()), exception.Message);
        return(false);
    }
}
/// <summary>
/// Main editor loop: runs until the window closes, limiting the frame rate to
/// 60 FPS while focused and 20 FPS while unfocused, pumping SDL/window events,
/// updating, and drawing each frame; shuts resources down on exit.
/// </summary>
public void Run()
{
    /*Task.Run(() =>
     * {
     *  while (true)
     *  {
     *      Thread.Sleep(5000);
     *      GC.Collect();
     *      GC.WaitForPendingFinalizers();
     *
     *      GC.Collect();
     *  }
     * });*/

    // Flush geometry megabuffers for editor geometry
    Renderer.GeometryBufferAllocator.FlushStaging();

    long previousFrameTicks = 0;
    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (_window.Exists)
    {
        bool focused = _window.Focused;
        // Target 20 FPS when unfocused, 60 FPS when focused.
        if (!focused)
        {
            _desiredFrameLengthSeconds = 1.0 / 20.0f;
        }
        else
        {
            _desiredFrameLengthSeconds = 1.0 / 60.0f;
        }
        long currentFrameTicks = sw.ElapsedTicks;
        double deltaSeconds = (currentFrameTicks - previousFrameTicks) / (double)Stopwatch.Frequency;

        // Spin (with short sleeps) until the target frame length has elapsed.
        while (_limitFrameRate && deltaSeconds < _desiredFrameLengthSeconds)
        {
            currentFrameTicks = sw.ElapsedTicks;
            deltaSeconds = (currentFrameTicks - previousFrameTicks) / (double)Stopwatch.Frequency;
            System.Threading.Thread.Sleep(focused ? 0 : 1);
        }
        previousFrameTicks = currentFrameTicks;

        InputSnapshot snapshot = null;
        Sdl2Events.ProcessEvents();
        snapshot = _window.PumpEvents();
        InputTracker.UpdateFrameInput(snapshot, _window);
        Update((float)deltaSeconds);
        // The window may have been closed during Update.
        if (!_window.Exists)
        {
            break;
        }

        if (_window.Focused)
        {
            Draw();
        }
        else
        {
            // Flush the background queues
            Renderer.Frame(null, true);
        }
    }

    //DestroyAllObjects();
    Resource.ResourceManager.Shutdown();
    _gd.Dispose();
    CFG.Save();
    System.Windows.Forms.Application.Exit();
}
/// <summary>
/// Initializes the view model: starts the shared stopwatch and launches the
/// variable-workload and thread-pool monitoring background threads.
/// </summary>
public MainWindowViewModel()
{
    stopwatch.Start();
    StartVariableWorkloadThread();
    StartThreadPoolMonitor();
}
/// <summary>
/// Starts (or resumes) the shared stopwatch; has no effect if it is already running.
/// </summary>
public static void StartStopwatch()
{
    stopwatch.Start();
}
/// <summary>
/// Console maze mini-game: the player (☻) navigates with the arrow keys, collects
/// coins (©), and must reach the exit within 15 seconds to win.
/// </summary>
static void Main()
{
    // constants & configuration
    const char wallSymbol = '█';
    const char player = '☻';
    const char coin = '©';
    const int escapeX = 14;
    const int escapeY = 1;
    const int consoleWidth = 16;
    const int consoleHeight = 8;
    const ConsoleColor defaultBackgroundColor = ConsoleColor.White;
    const ConsoleColor defaultForegroundColor = ConsoleColor.Black;
    const ConsoleColor playerColor = ConsoleColor.DarkGreen;

    // console prep
    Console.BackgroundColor = defaultBackgroundColor;
    Console.Clear();
    Console.WindowHeight = consoleHeight;
    Console.BufferHeight = consoleHeight;
    Console.WindowWidth = consoleWidth;
    Console.BufferWidth = consoleWidth;
    Console.OutputEncoding = Encoding.UTF8;
    Console.CursorVisible = false;

    // setup the data
    string[] maze = new string[]
    {
        "███████████████",
        "█ © █ ",
        "█ █████████ █",
        "█ █© █",
        "█ ███████ ███",
        "█ █",
        "███████████████"
    };

    // player stats
    int playerX = 1;
    int playerY = 5;
    int coinsCollected = 0;

    // timer
    Stopwatch timer = new Stopwatch();
    timer.Start();

    // while the player is in the maze, print, read and handle input
    while (playerX != escapeX || playerY != escapeY)
    {
        // check if the player is standing on a coin
        if (maze[playerY][playerX] == coin)
        {
            coinsCollected++;
            maze[playerY] = maze[playerY].Replace(coin, ' ');
        }

        // print the maze
        Console.SetCursorPosition(0, 0);
        Console.ForegroundColor = defaultForegroundColor;
        Console.Write(string.Join(Environment.NewLine, maze));

        // print the player
        Console.SetCursorPosition(playerX, playerY);
        Console.ForegroundColor = playerColor;
        Console.Write(player);

        // BUG FIX: ReadKey() echoed the pressed key at the cursor position, corrupting
        // the rendered maze; intercept:true swallows the keystroke instead.
        ConsoleKeyInfo keyInfo = Console.ReadKey(true);

        // move if there is not a wall in our way
        switch (keyInfo.Key)
        {
            case ConsoleKey.UpArrow:
                if (maze[playerY - 1][playerX] != wallSymbol) { playerY--; }
                break;
            case ConsoleKey.DownArrow:
                if (maze[playerY + 1][playerX] != wallSymbol) { playerY++; }
                break;
            case ConsoleKey.RightArrow:
                if (maze[playerY][playerX + 1] != wallSymbol) { playerX++; }
                break;
            case ConsoleKey.LeftArrow:
                if (maze[playerY][playerX - 1] != wallSymbol) { playerX--; }
                break;
            default:
                break;
        }
    }

    timer.Stop();

    // BUG FIX: Elapsed.Seconds wraps at 60 (75s of play would read as 15s and count
    // as a win); TotalSeconds is the true elapsed time.
    string message = timer.Elapsed.TotalSeconds <= 15 ? "You win!" : "Be faster!";
    Console.Clear();
    Console.SetCursorPosition(0, 0);
    Console.WriteLine(message);
    Console.WriteLine("{0} seconds, {2} {1} coins collected", (int)timer.Elapsed.TotalSeconds, coinsCollected, Environment.NewLine);
}
/// <summary>
/// Flood-writes <paramref name="requestsCnt"/> events over HTTP across
/// <paramref name="clientsCnt"/> client threads, throttled per thread by the
/// configured write window, and reports throughput and failure statistics.
/// </summary>
/// <param name="context">Command processor context (logging, client options, success/fail).</param>
/// <param name="eventStreamId">Target stream id; when null each thread writes to a fresh GUID stream.</param>
/// <param name="clientsCnt">Number of concurrent client threads.</param>
/// <param name="requestsCnt">Total number of write requests across all threads.</param>
private void WriteFlood(CommandProcessorContext context, string eventStreamId, int clientsCnt, long requestsCnt)
{
    context.IsAsync();

    var threads = new List<Thread>();
    var doneEvent = new ManualResetEventSlim(false);
    // Shared counters, updated with Interlocked from every worker thread.
    long all = 0;
    long succ = 0;
    long fail = 0;
    var sw = new Stopwatch();
    var sw2 = new Stopwatch();
    for (int i = 0; i < clientsCnt; i++)
    {
        // Spread requests evenly; the last thread also takes the remainder.
        var count = requestsCnt / clientsCnt + ((i == clientsCnt - 1) ? requestsCnt % clientsCnt : 0);
        long sent = 0;
        long received = 0;
        threads.Add(new Thread(() =>
        {
            var esId = eventStreamId ?? ("es" + Guid.NewGuid());
            var client = new HttpAsyncClient();

            // Common completion path for both success and failure callbacks.
            Action onReceived = () =>
            {
                Interlocked.Increment(ref received);
                var localAll = Interlocked.Increment(ref all);
                // Log rolling throughput every 10000 completed writes.
                if (localAll % 10000 == 0)
                {
                    var elapsed = sw2.Elapsed;
                    sw2.Restart();
                    context.Log.Trace("\nDONE TOTAL {0} WRITES IN {1} ({2:0.0}/s).", localAll, elapsed, 1000.0 * 10000 / elapsed.TotalMilliseconds);
                }
                if (localAll == requestsCnt)
                {
                    doneEvent.Set();
                }
            };
            Action<HttpResponse> onSuccess = response =>
            {
                if (response.HttpStatusCode == HttpStatusCode.Created)
                {
                    // Progress dot every 100 successes.
                    if (Interlocked.Increment(ref succ) % 100 == 0)
                    {
                        Console.Write('.');
                    }
                }
                else
                {
                    var localFail = Interlocked.Increment(ref fail);
                    if (localFail % 100 == 0)
                    {
                        Console.Write('#');
                    }
                    if (localFail % 10 == 0)
                    {
                        context.Log.Info("ANOTHER 10th WRITE FAILED. [{0}] - [{1}]", response.HttpStatusCode, response.StatusDescription);
                    }
                }
                onReceived();
            };
            Action<Exception> onException = exc =>
            {
                context.Log.ErrorException(exc, "Error during POST.");
                if (Interlocked.Increment(ref fail) % 100 == 0)
                {
                    Console.Write('#');
                }
                onReceived();
            };

            for (int j = 0; j < count; ++j)
            {
                var url = context.Client.HttpEndpoint.ToHttpUrl("/streams/{0}", esId);
                var write = new[] { new HttpClientMessageDto.ClientEventText(Guid.NewGuid(), "type", "DATA" + new string('*', 256), "METADATA" + new string('$', 100)) };
                var requestString = Codec.Json.To(write);
                client.Post(
                    url,
                    requestString,
                    Codec.Json.ContentType,
                    new Dictionary<string, string> { },
                    TimeSpan.FromMilliseconds(10000),
                    onSuccess,
                    onException);

                // Flow control: keep at most (WriteWindow / clientsCnt) requests in flight.
                var localSent = Interlocked.Increment(ref sent);
                while (localSent - Interlocked.Read(ref received) > context.Client.Options.WriteWindow / clientsCnt)
                {
                    Thread.Sleep(1);
                }
            }
        }) { IsBackground = true });
    }

    sw.Start();
    sw2.Start();
    threads.ForEach(thread => thread.Start());
    doneEvent.Wait();
    sw.Stop();

    var reqPerSec = (all + 0.0) / sw.ElapsedMilliseconds * 1000;
    context.Log.Info("Completed. Successes: {0}, failures: {1}", Interlocked.Read(ref succ), Interlocked.Read(ref fail));
    context.Log.Info("{0} requests completed in {1}ms ({2:0.00} reqs per sec).", all, sw.ElapsedMilliseconds, reqPerSec);
    PerfUtils.LogData(Keyword, PerfUtils.Row(PerfUtils.Col("clientsCnt", clientsCnt), PerfUtils.Col("requestsCnt", requestsCnt), PerfUtils.Col("ElapsedMilliseconds", sw.ElapsedMilliseconds)), PerfUtils.Row(PerfUtils.Col("successes", succ), PerfUtils.Col("failures", fail)));
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-{1}-{2}-reqPerSec", Keyword, clientsCnt, requestsCnt), (int)reqPerSec);
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-{1}-{2}-failureSuccessRate", Keyword, clientsCnt, requestsCnt), (int)(100.0 * fail / (fail + succ)));

    if (Interlocked.Read(ref succ) != requestsCnt)
    {
        context.Fail(reason: "There were errors or not all requests completed.");
    }
    else
    {
        context.Success();
    }
}
/// <summary>
/// Bucket-sorts a clone of the underlying list in place and reports how long the
/// sort itself took (the clone is made before timing starts). Roughly one bucket
/// per ten elements is used, with a minimum of three.
/// </summary>
/// <returns>The elapsed time of the sort, in stopwatch ticks.</returns>
public long BucketSort()
{
    ArrayList items = cloneList();

    Stopwatch timer = new Stopwatch();
    timer.Start();

    // Use roughly one bucket per ten elements, but never fewer than three.
    int bucketCount = Convert.ToInt16(0.1 * items.Count);
    if (bucketCount < 3)
    {
        bucketCount = 3;
    }

    // Determine the value range so each bucket covers an equal slice of it.
    int maxValue = -1;
    int minValue = int.MaxValue;
    foreach (object item in items)
    {
        int value = getNumericalValue(item);
        if (value > maxValue)
        {
            maxValue = value;
        }
        if (value < minValue)
        {
            minValue = value;
        }
    }

    int bucketWidth = ((maxValue - minValue) / bucketCount) + 1;

    // Create the empty buckets.
    ArrayList[] buckets = new ArrayList[bucketCount];
    for (int b = 0; b < buckets.Length; b++)
    {
        buckets[b] = new ArrayList();
    }

    // Distribute every element into its bucket, keeping each bucket ordered
    // by inserting at the position findIndex reports.
    foreach (object item in items)
    {
        int value = getNumericalValue(item);
        ArrayList target = buckets[(value - minValue) / bucketWidth];
        target.Insert(findIndex(item, target), item);
    }

    // Concatenate the buckets back into the list in order.
    int write = 0;
    foreach (ArrayList bucket in buckets)
    {
        foreach (object sortedItem in bucket)
        {
            items[write] = sortedItem;
            write++;
        }
    }

    timer.Stop();
    //Display(items);
    return timer.ElapsedTicks;
}
/// <summary>
/// Trains the model: feeds training batches through the optimizer, logs the loss
/// every 10 steps, evaluates validation accuracy every 100 steps, and checkpoints
/// the model whenever validation accuracy improves on the best seen so far.
/// </summary>
public override void Train()
{
    var graph = Config.IsImportingGraph ? ImportGraph() : BuildGraph();

    using (var sess = tf.Session(graph))
    {
        sess.run(tf.global_variables_initializer());
        var saver = tf.train.Saver(tf.global_variables());

        var train_batches = batch_iter(train_x, train_y, BATCH_SIZE, NUM_EPOCHS);
        var num_batches_per_epoch = (len(train_x) - 1) / BATCH_SIZE + 1;

        // Look up the graph nodes needed for training by operation name.
        Tensor is_training = graph.OperationByName("is_training");
        Tensor model_x = graph.OperationByName("x");
        Tensor model_y = graph.OperationByName("y");
        Tensor loss = graph.OperationByName("loss/Mean");
        Operation optimizer = graph.OperationByName("loss/Adam");
        Tensor global_step = graph.OperationByName("Variable");
        Tensor accuracy = graph.OperationByName("accuracy/accuracy");

        var sw = new Stopwatch();
        sw.Start();

        int step = 0;
        foreach (var(x_batch, y_batch, total) in train_batches)
        {
            // Run one optimization step; retrieves the updated global step and loss.
            (_, step, loss_value) = sess.run((optimizer, global_step, loss), (model_x, x_batch), (model_y, y_batch), (is_training, true));
            if (step % 10 == 0)
            {
                // The stopwatch is restarted, so the time shown covers 10 batches.
                Console.WriteLine($"Training on batch {step}/{total} loss: {loss_value.ToString("0.0000")} {sw.ElapsedMilliseconds}ms.");
                sw.Restart();
            }

            if (step % 100 == 0)
            {
                // Test accuracy with validation data for each epoch.
                var valid_batches = batch_iter(test_x, test_y, BATCH_SIZE, 1);
                var(sum_accuracy, cnt) = (0.0f, 0);
                foreach (var(valid_x_batch, valid_y_batch, total_validation_batches) in valid_batches)
                {
                    var valid_feed_dict = new FeedDict
                    {
                        [model_x] = valid_x_batch,
                        [model_y] = valid_y_batch,
                        [is_training] = false
                    };
                    float accuracy_value = sess.run(accuracy, (model_x, valid_x_batch), (model_y, valid_y_batch), (is_training, false));
                    sum_accuracy += accuracy_value;
                    cnt += 1;
                }
                // Mean accuracy over all validation batches.
                var valid_accuracy = sum_accuracy / cnt;
                print($"\nValidation Accuracy = {valid_accuracy.ToString("P")}\n");

                // Save model
                if (valid_accuracy > max_accuracy)
                {
                    max_accuracy = valid_accuracy;
                    saver.save(sess, $"{dataDir}/word_cnn.ckpt", global_step: step);
                    print("Model is saved.\n");
                }
            }
        }
    }
}