// Gets an object from the pool: reuses a pooled instance when possible,
// otherwise creates a new one, never exceeding MaxConcurrency live objects.
public T GetObject()
{
    // NOTE(review): locks on the createObjectFunc delegate; a dedicated private
    // lock object is the conventional choice — confirm all other members of
    // this class synchronize on the same delegate before changing it.
    lock (createObjectFunc)
    {
        watch?.Restart();
        var limitReached = false;
        do
        {
            if (internalPool.TryTake(out var item))
            {
                if (item.CanBeReused)
                {
                    return (item);
                }
                // Stale item: dispose it (if disposable) and drop it from the live count.
                (item as IDisposable)?.Dispose();
                aliveObjects--;
            }
            limitReached = aliveObjects >= MaxConcurrency;
            if (limitReached)
            {
                // NOTE(review): sleeping while holding the lock blocks any thread
                // that needs the same lock to return an object — verify the return
                // path does not contend on createObjectFunc, or this can stall.
                Thread.Sleep(1);
            }
        } while (limitReached);
        aliveObjects++;
        return (createObjectFunc());
    }
}
// Advances one frame: updates the handler, environment-info and task-event
// components in order. Each step is individually timed; the trace logging
// that consumed those timings is currently disabled.
public virtual void Update()
{
    // Nothing to do until the component has been started.
    if (!isStart)
    {
        return;
    }

    Currentframe++;

    stopwatch?.Restart();
    _handlerComponent?.Update();
    stopwatch?.Stop();

    stopwatch?.Restart();
    _envirinfoComponent?.Tick();
    stopwatch?.Stop();

    stopwatch?.Restart();
    _taskEventComponent?.Update();
    stopwatch?.Stop();
}
// Benchmarks three delete strategies against the database and prints the
// elapsed time of each.
static void Main()
{
    // Established connection with the base
    var context = new SoftUniEntities();

    var sw = new Stopwatch();
    sw.Start();

    // Bug fix: the labels previously did not match the method being measured
    // (EfExtensionsDelete was printed as "Standart" and EfStandartDelete as
    // "Extensions"), making the benchmark output misleading.
    EfExtensionsDelete(context);
    Console.WriteLine("Extensions: {0}", sw.Elapsed);

    sw.Restart();
    NativeDelete(context);
    Console.WriteLine("Native: {0}", sw.Elapsed);

    sw.Restart();
    EfStandartDelete(context);
    Console.WriteLine("Standart: {0}", sw.Elapsed);
}
// Shows a WinForms window whose button measures the cost of resuming awaits
// on the UI SynchronizationContext versus bypassing it.
internal static void Run()
{
    var form = new Form() { Width = 400, Height = 300 };
    var button = new Button() { Text = "Run", Dock = DockStyle.Fill, Font = new Font("Consolas", 18) };
    form.Controls.Add(button);

    button.Click += async delegate
    {
        button.Text = "... Running ... ";

        // warm-up
        await Task.WhenAll(WithSyncCtx(), WithoutSyncCtx());

        var stopwatch = new Stopwatch();

        stopwatch.Restart();
        await WithSyncCtx();
        var withTime = stopwatch.Elapsed;

        stopwatch.Restart();
        await WithoutSyncCtx();
        var withoutTime = stopwatch.Elapsed;

        button.Text = string.Format("With : {0}\nWithout : {1}\n\nDiff : {2:F2}x",
            withTime, withoutTime, withTime.TotalSeconds / withoutTime.TotalSeconds);
    };

    form.ShowDialog();
}
// Benchmarks the float/double/decimal implementations of square root, sine
// and logarithm. The nine identical measure/print sequences are factored
// into a single Measure helper; the console output is unchanged.
static void Main()
{
    Console.WriteLine("SQRT:");
    Measure("Float", () => Sqrt.FloatSqrt(1f, 10000000f, 1f));
    Measure("Double", () => Sqrt.DoubleSqrt(1d, 10000000d, 1d));
    Measure("Decimal", () => Sqrt.DecimalSqrt(1m, 10000000m, 1m));

    Console.WriteLine("Sinus:");
    Measure("Float", () => Sinus.FloatSinus(1f, 10000000f, 1f));
    Measure("Double", () => Sinus.DoubleSinus(1d, 10000000d, 1d));
    Measure("Decimal", () => Sinus.DecimalSinus(1m, 10000000m, 1m));

    Console.WriteLine("Logarithm:");
    Measure("Float", () => Logarithm.FloatLogarithm(1f, 10000000f, 1f));
    Measure("Double", () => Logarithm.DoubleLogarithm(1d, 10000000d, 1d));
    Measure("Decimal", () => Logarithm.DecimalLogarithm(1m, 10000000m, 1m));
}

// Runs one benchmark action and prints "<label>: <elapsed milliseconds>",
// matching the original output format exactly.
private static void Measure(string label, Action action)
{
    var watch = Stopwatch.StartNew();
    action();
    watch.Stop();
    Console.WriteLine(label + ": " + watch.ElapsedMilliseconds);
}
// Queues a message for delivery: enforces the optional queue-size cap by
// discarding the oldest entries, then enqueues and wakes the consumer.
internal void ActualQueueMessage(MessageMetadata mp)
{
#if ACTIVEDEBUG
    Emergency.Diags?.Log($"Message queued " + mp.Body);
#endif
    if (shutdownRequested)
    {
        throw new NotSupportedException("ShutdownEnabled, this method should not be called when shutdown has begun");
    }
    if (!HaveHandlers)
    {
        return;
    }   // No handlers, just throw the content away.
    if (messageQueueMaximum > 0)
    {
        // Remove oldest messages until we come down below the limit again.
        while (messageQueue.Count >= messageQueueMaximum)
        {
            // Spin until one dequeue succeeds; TryDequeue can fail transiently
            // when another thread races us on the same queue.
            while (!messageQueue.TryDequeue(out MessageMetadata mpx))
            {
                ;
            }
        }
    }
    messageQueue.Enqueue(mp);
    // Restart the idle timer (if present) and signal that work is available.
    elapsedTimer?.Restart();
    queuedMessageResetEvent.Set();
}
// Invokes the registered heartbeat handler at most once per
// _heartRateMilliseconds, and only while no exceptions are outstanding.
private void Heartbeat()
{
    if (_heartbeatHandler == null)
    {
        return;
    }

    // First call: start the monitor so a full interval elapses before the
    // first beat.
    if (!_heartRateMonitor.IsRunning)
    {
        _heartRateMonitor.Start();
        return;
    }

    // Fix: _heartRateMonitor is dereferenced unconditionally above, so the
    // null-conditional "?." used on it below this point was dead and
    // misleading; plain member access is used consistently instead.
    if (_numberOfException > 0 || _heartRateMonitor.ElapsedMilliseconds < _heartRateMilliseconds)
    {
        return;
    }

    _logger.Debug("Heartbeat Handler called {QueueName}", _queueName);
    try
    {
        _heartbeatHandler.Invoke();
    }
    catch (Exception exception)
    {
        // A failing heartbeat must never take the consumer down; log and continue.
        _logger.Warning(exception, "Error in Heartbeat Handler {QueueName}", _queueName);
    }

    _heartRateMonitor.Restart();
}
static void Main()
{
    // Demonstrates the cost of materializing intermediate results (.ToList())
    // between LINQ-to-Entities operators versus composing a single query.
    TelerikAcademyEntities db = new TelerikAcademyEntities();
    Stopwatch sw = new Stopwatch();
    using (db)
    {
        sw.Start();
        // Deliberately slow: each .ToList() forces execution, so filtering
        // happens in memory and related entities are lazy-loaded one by one.
        IEnumerable query = db.Employees.ToList()
            .Select(x => x.Address).ToList()
            .Select(t => t.Town).ToList()
            .Where(t => t.Name == "Sofia");
        sw.Stop();
        Console.WriteLine("Slow: {0}", sw.Elapsed); // made 644 queries

        sw.Restart();
        // Composed query: the projection and filter translate to SQL and run
        // server-side in a single round trip.
        IEnumerable querySmart = db.Employees
            .Select(x => x.Address)
            .Select(t => t.Town)
            .Where(t => t.Name == "Sofia").ToList();
        sw.Stop();
        Console.WriteLine("Fast: {0}", sw.Elapsed); // made 2 queries
    }
}
// Closes the open stream associated with the given file id, if one exists,
// and restarts the idle-timeout stopwatch.
public void CloseStream(Guid file)
{
    timeoutStopwatch?.Restart();
    if (openFileStreams.TryGetValue(file, out FileStream stream))
    {
        // NOTE(review): the stream is closed but its entry is not removed from
        // openFileStreams — confirm removal happens elsewhere, otherwise the
        // map keeps a closed stream and a later lookup may try to use it.
        stream.Close();
    }
}
// Event handler for progress-message updates: records and prints the new
// message, then restarts the stopwatch that throttles progress output.
private static void ProgressMessageChanged(object sender, string e)
{
    progressMsg = e;
    lock (consoleLock)
    {
        Console.WriteLine(progressMsg);
    }
    // NOTE(review): ClearLastLine runs outside consoleLock — confirm it cannot
    // race with other console writers guarded by that lock.
    ClearLastLine();
    stopwatch?.Restart();
}
// Shows the video-list UI: resets the inactivity timer, focuses navigation on
// the first video entry and starts the show/hide animations.
// NOTE(review): the 'panel' parameter is unused here — presumably required by
// a shared panel-callback signature; confirm before removing.
public void ShowListUI(PanelController panel)
{
    SleepTimer?.Restart();
    disableButtons = true;
    NavController.curPanel = ListController.VideoList[0].gameObject.GetComponent<NavObject>();
    VideoListUI.SetActive(true);
    StartCoroutine(ShowVideos());
    // Resize the selection indicator to match the list layout.
    NavController.indicator.transform.localScale = new Vector3(1.95f, 2.4f);
    StartCoroutine(HideCategories());
}
// Note: I have referenced the AdsEntities from the previous project
// instead of creating another one in this project.
static void Main()
{
    /*
     * Using Entity Framework select all ads from the database,
     * then invoke ToList(), then filter the categories whose status is Published;
     * then select the ad title, category and town, then invoke ToList()
     * again and finally order the ads by publish date.
     * Rewrite the same query in a more optimized way and compare the performance.
     */
    var context = new AdsEntities();
    Stopwatch sw = new Stopwatch();
    sw.Start();
    for (int count = 0; count < 10; count++)
    {
        // Deliberately unoptimised: the first ToList() pulls every ad into
        // memory, so filtering/projection run client-side with lazy loading.
        var ads = context.Ads
            .Select(a => a)
            .ToList()
            .Where(a => a.AdStatus != null && a.AdStatus.Status == "Published")
            .Select(a => new
            {
                Title = a.Title,
                Category = a.Category != null ? a.Category.Name : "(null)",
                Town = a.Town != null ? a.Town.Name : "(null)",
                PublishDate = a.Date
            })
            .ToList()
            .OrderBy(a => a.PublishDate);
    }

    // Unoptimised: 5 seconds
    Console.WriteLine("Unoptimised: {0}", sw.Elapsed);
    sw.Restart();

    for (int count = 0; count < 10; count++)
    {
        // Optimised: filter, projection and ordering compose into one SQL query.
        var ads = context.Ads
            .Where(a => a.AdStatus != null && a.AdStatus.Status == "Published")
            .Select(a => new
            {
                Title = a.Title,
                Category = a.Category != null ? a.Category.Name : "(null)",
                Town = a.Town != null ? a.Town.Name : "(null)",
                PublishDate = a.Date
            })
            .OrderBy(a => a.PublishDate)
            .ToList();
    }

    // Optimised: 0.5 seconds
    Console.WriteLine("Optimised: {0}", sw.Elapsed);
    sw.Stop();
}
// Console dodging game: the player moves at the bottom of a 25x55 grid while
// projectiles fall. Difficulty (spawn delay) ramps down from 420 to 20 in
// steps of 50 during roughly the first 35 seconds.
static void Main(string[] args)
{
    int[] oPos = new int[4]; // 0..1 - position (row, col), 2-3 - other status
    string[,] screen = new string[25, 55];
    int[,] projectiles = new int[25, 55];
    int difficulty = 420;

    // Fill the screen with spaces and clear the projectile grid.
    for (int i = 0; i < 25; i++)
    {
        for (int j = 0; j < 55; j++)
        {
            screen[i, j] = " ";
        }
    }
    for (int i = 0; i < 25; i++)
    {
        for (int j = 0; j < 55; j++)
        {
            projectiles[i, j] = 0;
        }
    }

    oPos[0] = 24; // i (row)
    oPos[1] = 22; // j (column)

    Stopwatch highScoreSw = new Stopwatch();
    Stopwatch diffSw = new Stopwatch();
    diffSw.Start();
    highScoreSw.Start();

    while (true)
    {
        // Every ~5 seconds during the ramp-up window, shorten the spawn delay.
        // Fix: use TotalSeconds with a plain comparison instead of the fragile
        // Elapsed.Seconds.CompareTo(5) == 1 idiom — Elapsed.Seconds is only the
        // seconds component and wraps at 60, so it is not a monotonic measure.
        if (diffSw.Elapsed.TotalSeconds > 5 && difficulty != 20 && diffSw.Elapsed.TotalSeconds < 35)
        {
            difficulty -= 50;
            diffSw.Restart();
        }

        Console.Clear();
        GetInput(oPos);
        ProjMovementAndCreation(projectiles, difficulty);
        DrawScreen(screen, projectiles, oPos, highScoreSw);
        CollisionDetection(projectiles, oPos, ref difficulty);
        Thread.Sleep(100);
    }
}
// Times how long it takes to bump the trie occurrence count for every matched
// word, then reports the elapsed time on the console.
private static void IncrementOccuranceCountTrie(Stopwatch sw, TrieNode start, MatchCollection allWords)
{
    sw.Restart();
    foreach (var match in allWords)
    {
        // Only words already present in the trie get their count incremented.
        start.AddOccuranceIfExists(start, match.ToString());
    }
    sw.Stop();
    Console.WriteLine("Adding searched words count trie for: {0}", sw.Elapsed);
}
// Times the unoptimised query method against the optimised one and prints
// both elapsed times.
static void Main()
{
    var context = new AdsEntities1();
    var timer = Stopwatch.StartNew();

    NotOptimizedMethod(context);
    Console.WriteLine("Not optimized method: {0}", timer.Elapsed);

    timer.Restart();
    OptimizedMethod(context);
    Console.WriteLine("Optimized method: {0}", timer.Elapsed);
}
/// <summary>
/// Ticks the physics engine and the logic of every Actor object.
/// </summary>
public void Tick()
{
    stopwatch?.Restart();
    for (int i = 0; i < _actorList.Count; i++)
    {
        _actorList[i].Update();
        // Disabled diagnostics: per-actor trace of position/forward/force/
        // velocity/torque, plus fixture checks for continuous-laser actors.
    }
    stopwatch?.Stop();
    // Disabled diagnostics: elapsed-time trace for the actor update loop.

    stopwatch?.Restart();
    // Step the physics runner.
    m_runner.Update();
    stopwatch?.Stop();
    // Disabled diagnostics: elapsed-time trace for the physics step.
}
// Prints "Surprise" every 'seconds' seconds, forever (busy-waits in between).
public static void SendMessage(int seconds)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();
    while (true)
    {
        // Fix: the original compared ElapsedMilliseconds with '==', which can
        // step past the exact value and then never fire; '>=' guarantees one
        // message per interval (same pattern as the startTimer counterpart).
        if (sw.ElapsedMilliseconds >= seconds * 1000)
        {
            Console.WriteLine("Surprise");
            sw.Restart();
        }
    }
}
// Prints "Boom" every t seconds, forever. Busy-waits on a Stopwatch between
// ticks.
public void startTimer(int t)
{
    var timer = Stopwatch.StartNew();
    int intervalMs = t * 1000;
    while (true)
    {
        if (timer.ElapsedMilliseconds >= intervalMs)
        {
            Console.WriteLine("Boom");
            timer.Restart();
        }
    }
}
// Seeds the dictionary with a zero count for every distinct word in 'words',
// leaving counts of already-present keys untouched, and prints how long the
// population took.
private static void AddWordsForSearchInDictionary(Stopwatch sw, List<string> words, Dictionary<string, int> wordsInDictionary)
{
    sw.Restart();
    foreach (var word in words)
    {
        // Fix: removed the redundant item.ToString() — the list elements are
        // already strings, so the call only allocated an identity conversion.
        if (!wordsInDictionary.ContainsKey(word))
        {
            wordsInDictionary[word] = 0;
        }
    }
    sw.Stop();
    Console.WriteLine("Time to populate dictionary: {0}\n\n", sw.Elapsed);
}
// Endless micro-benchmark: cost of calling an empty synchronous method versus
// an empty async method, ITERS times per round.
internal static void Run()
{
    const int ITERS = 10000000;
    var stopwatch = new Stopwatch();

    // Warm-up so JIT compilation is excluded from the first measurement.
    EmptyBody();
    EmptyBodyAsync();

    while (true)
    {
        stopwatch.Restart();
        for (int i = 0; i < ITERS; i++)
        {
            EmptyBody();
        }
        var syncElapsed = stopwatch.Elapsed;

        stopwatch.Restart();
        for (int i = 0; i < ITERS; i++)
        {
            EmptyBodyAsync();
        }
        var asyncElapsed = stopwatch.Elapsed;

        Console.WriteLine("Sync : {0}", syncElapsed);
        Console.WriteLine("Async : {0}", asyncElapsed);
        Console.WriteLine("-- {0:F1}x --", asyncElapsed.TotalSeconds / syncElapsed.TotalSeconds);
    }
}
// Endless micro-benchmark: caching a downloaded string versus caching the
// Task that produced it, ITERS awaited calls per round.
internal static void Run()
{
    string url = "http://www.microsoft.com";

    // Warm-up (also primes each cache).
    GetContents1Async(url).Wait();
    GetContents2Async(url).Wait();

    var stopwatch = new Stopwatch();
    while (true)
    {
        stopwatch.Restart();
        for (int i = 0; i < ITERS; i++)
        {
            GetContents1Async(url).Wait();
        }
        var cacheStringTime = stopwatch.Elapsed;

        stopwatch.Restart();
        for (int i = 0; i < ITERS; i++)
        {
            GetContents2Async(url).Wait();
        }
        var cacheTaskTime = stopwatch.Elapsed;

        Console.WriteLine("Cache string : {0}", cacheStringTime);
        Console.WriteLine("Cache task : {0}", cacheTaskTime);
        Console.WriteLine("---- {0:F2}x ----", cacheStringTime.TotalSeconds / cacheTaskTime.TotalSeconds);
        Console.ReadLine();
    }
}
// Endless micro-benchmark: overhead added to async work when non-default
// logical CallContext data must be flowed, versus the default context.
internal static void Run()
{
    var stopwatch = new Stopwatch();
    while (true)
    {
        CallContext.LogicalSetData("Foo", "Bar"); // changes from default context
        stopwatch.Restart();
        DoWorkAsync().Wait();
        var withTime = stopwatch.Elapsed;

        CallContext.FreeNamedDataSlot("Foo"); // back to default context
        stopwatch.Restart();
        DoWorkAsync().Wait();
        var withoutTime = stopwatch.Elapsed;

        Console.WriteLine("With : {0}", withTime);
        Console.WriteLine("Without : {0}", withoutTime);
        Console.WriteLine("---- {0:F2}x ----", withTime.TotalSeconds / withoutTime.TotalSeconds);
        Console.ReadLine();
    }
}
static void Main()
{
    // Compares a client-side ("messy") query that materialises all ads before
    // filtering against the same query fully composed on the server.
    var context = new AdsEntities();
    // Flush SQL Server's buffer cache so both runs start cold.
    context.Database.ExecuteSqlCommand("CHECKPOINT; DBCC DROPCLEANBUFFERS;");
    var sw = new Stopwatch();
    // Warm up the context/connection so setup cost is not measured.
    Console.WriteLine(context.Ads.Any());
    sw.Start();

    // Messy query
    var ads = context.Ads
        .ToList()
        .Where(a => a.AdStatus.Status == "Published")
        .Select(a => new
        {
            Title = a.Title,
            Category = a.Category,
            Town = a.Town,
            Date = a.Date
        })
        .ToList()
        .OrderBy(a => a.Date);

    Console.WriteLine("Millisecond with a messy query: " + sw.ElapsedMilliseconds + "ms");
    sw.Restart();

    var adsImproved = context.Ads
        .Where(a => a.AdStatus.Status == "Published")
        .Select(a => new
        {
            Title = a.Title,
            Category = a.Category,
            Town = a.Town,
            Date = a.Date
        })
        .OrderBy(a => a.Date)
        .ToList();

    Console.WriteLine("Millisecond with a proper query: " + sw.ElapsedMilliseconds + "ms");

    // TEST RESULTS:
    //+---------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+--------+---------+
    //|               | Run 1 | Run 2 | Run 3 | Run 4 | Run 5 | Run 6 | Run 7 | Run 8 | Run 9 | Run 10 | Average |
    //+---------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+--------+---------+
    //| Non-optimized | 237   | 245   | 243   | 247   | 256   | 237   | 236   | 266   | 236   | 237    | 244ms   |
    //+---------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+--------+---------+
    //| Optimized     | 123   | 122   | 125   | 128   | 121   | 123   | 123   | 121   | 122   | 123    | 123ms   |
    //+---------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+--------+---------+

    // Improvement - Almost 2 (1.98) times faster.
}
// Increments the count of every matched word that already exists in the
// dictionary (unknown words are ignored) and prints how long the pass took.
private static void IncrementOccuranceCountDictionary(Stopwatch sw, Dictionary<string, int> wordsInDictionary, MatchCollection allWords)
{
    sw.Restart();
    foreach (var word in allWords)
    {
        string wordStr = word.ToString();
        // Fix: TryGetValue replaces the ContainsKey + indexer pair, avoiding a
        // second hash lookup per word.
        if (wordsInDictionary.TryGetValue(wordStr, out int count))
        {
            wordsInDictionary[wordStr] = count + 1;
        }
    }
    sw.Stop();
    Console.WriteLine("Adding searched words count dictionary for: {0}\n", sw.Elapsed);
}
// Note: I have referenced the AdsEntities from the previous project
// instead of creating another one in this project.
static void Main()
{
    /*
     * Write a program to compare the execution speed between these two scenarios:
     * Select everything from the Ads table and print only the ad title.
     * Select the ad title from Ads table and print it.
     */
    var context = new AdsEntities();
    Stopwatch sw = new Stopwatch();
    sw.Start();
    for (int count = 0; count < 10; count++)
    {
        // Unoptimised: fetches whole Ad entities though only Title is printed.
        var ads = context.Ads.Select(a => a);
        foreach (var ad in ads)
        {
            Console.WriteLine(ad.Title);
        }
    }
    TimeSpan unoptimisedTime = sw.Elapsed;

    sw.Restart();
    for (int count = 0; count < 10; count++)
    {
        // Optimised: projects to the title column only, so far less data is
        // transferred and materialised.
        var ads = context.Ads.Select(a => a.Title);
        foreach (var adTitle in ads)
        {
            Console.WriteLine(adTitle);
        }
    }
    TimeSpan optimisedTime = sw.Elapsed;
    sw.Stop();

    /*
     * Unoptimised: 5.5 seconds
     * Optimised: 0.2 seconds
     */
    Console.WriteLine();
    Console.WriteLine("Unoptimised: {0}", unoptimisedTime);
    Console.WriteLine("Optimised: {0}", optimisedTime);
}
static void Main()
{
    // Compares loading all ads with lazy navigation properties against eager
    // loading via Include(...): fewer SQL statements at a modest time cost.
    var context = new AdsEntities();
    var stopwatch = new Stopwatch();
    // Warm up the context/connection so setup cost is not measured.
    Console.WriteLine(context.Ads.Any());
    stopwatch.Start();

    var allAdsNoInclude = context.Ads.ToList();
    Console.WriteLine(stopwatch.ElapsedMilliseconds);
    //foreach (var ad in allAdsNoInclude)
    //{
    //    Console.WriteLine("Ad Title: {0}, Ad Status: {1}, Ad Category: {2}, Ad Town: {3}, Ad User: {4}",
    //        ad.Title, ad.AdStatus.Status, (ad.Category == null ? "no category" : ad.Category.Name), (ad.Town == null ? "no town" : ad.Town.Name), ad.AspNetUser.Name);
    //}

    stopwatch.Restart();
    var allAdsInclude = context.Ads
        .Include("Category")
        .Include("Town")
        .Include("AspNetUser")
        .Include("AdStatus")
        .ToList();
    Console.WriteLine(stopwatch.ElapsedMilliseconds);
    //foreach (var ad in allAdsInclude)
    //{
    //    Console.WriteLine("Ad Title: {0}, Ad Status: {1}, Ad Category: {2}, Ad Town: {3}, Ad User: {4}",
    //        ad.Title, ad.AdStatus.Status, (ad.Category == null ? "no category" : ad.Category.Name), (ad.Town == null ? "no town" : ad.Town.Name), ad.AspNetUser.Name);
    //}

    // TESTS RESULTS:
    //+--------------------------+-----------------+-------------------+
    //|                          | No Include(...) | With Include(...) |
    //+--------------------------+-----------------+-------------------+
    //| Number of SQL Statements | 29              | 1                 |
    //+--------------------------+-----------------+-------------------+
    //| Milliseconds to complete | 123             | 164               |
    //+--------------------------+-----------------+-------------------+
}
// Updates the bound Progress/Status from the running operation. When the
// operation supplies no progress text, the speed/ETA status is throttled via
// Wait() so it is not refreshed on every event.
private void Operation_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
    // Clamp the reported percentage into [ProgressMinimum, ProgressMaximum].
    this.Progress = Math.Min(ProgressMaximum, Math.Max(ProgressMinimum, e.ProgressPercentage));
    if (!string.IsNullOrEmpty(this.Operation.ProgressText))
    {
        this.Status = this.Operation.ProgressText;
    }
    else
    {
        // Throttle: skip this refresh if the wait interval has not elapsed yet.
        if (this.Wait())
        {
            return;
        }
        sw?.Restart();
        this.Status = this.Operation.Speed + this.Operation.ETA;
    }
}
// Reads a whitespace-separated list of numbers plus a target number, then
// times a binary search and a linear search for that target and reports each
// result with its elapsed time.
static void Main(string[] args)
{
    string[] input = Console.ReadLine().Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
    long number = long.Parse(Console.ReadLine());
    long[] numbersArray = Array.ConvertAll(input, long.Parse);

    //Timer on Binary Search
    // NOTE(review): binary search only works on sorted input — presumably the
    // helper sorts or the input is expected sorted; confirm.
    Stopwatch timer = new Stopwatch();
    timer.Start();
    int numberIndex = CheckIndexUsingBinarySearch(numbersArray, number);
    timer.Stop();
    if (numberIndex == -1)
    {
        Console.WriteLine("Number {0} not found.", number);
    }
    else
    {
        Console.WriteLine("Number {0} found at index {1}. Elapsed time: {2}.", number, numberIndex, timer.Elapsed);
    }

    //Timer on Linear Search
    timer.Restart();
    int numberIndex2 = CheckIndexUsingLinearSearch(numbersArray, number);
    timer.Stop();
    if (numberIndex2 == -1)
    {
        Console.WriteLine("Number {0} not found.", number);
    }
    else
    {
        Console.WriteLine("Number {0} found at index {1}. Elapsed time: {2}.", number, numberIndex2, timer.Elapsed);
    }
}
// Drains up to the current queue length, persisting each dequeued result.
// When runAll is set, the number of remaining items is reported roughly once
// per second.
private async Task SaveFromQueue(CancellationToken stoppingToken, bool runAll = false)
{
    Stopwatch runAllStopwatch = null;
    if (runAll)
    {
        // Fix: Stopwatch.StartNew() replaces "new Stopwatch()" followed by a
        // null-conditional Restart on a reference assigned one line earlier —
        // the "?." there was dead and obscured the intent.
        runAllStopwatch = Stopwatch.StartNew();
    }

    // Snapshot the count so items enqueued during this drain are left for the
    // next invocation.
    var count = _queue.Count;
    for (var i = 0; i < count; i++)
    {
        if (stoppingToken.IsCancellationRequested)
        {
            return;
        }
        if (_queue.TryDequeue(out var result))
        {
            try
            {
                await _iterationResultRepository.Save(result);
            }
            catch (Exception e)
            {
                // Best effort: a failed save is logged and the drain continues.
                Console.WriteLine($"{_name} - {e.Message}");
            }
            if (runAll && runAllStopwatch.ElapsedMilliseconds > 1000)
            {
                runAllStopwatch.Restart();
                Console.WriteLine($"{_queue.Count} remaining..");
            }
        }
    }
    runAllStopwatch?.Stop();
}
// Times a native SQL query against the equivalent LINQ query and prints both
// elapsed times.
public static void Main()
{
    var context = new SoftUniEntities();
    var totalCount = context.Employees.Count();

    var timer = Stopwatch.StartNew();
    PrintNamesWithNativeQuery(context);
    var nativeElapsed = timer.Elapsed;

    timer.Restart();
    PrintNamesWithLinqQuery(context);
    var linqElapsed = timer.Elapsed;

    Console.WriteLine("Native : {0}", nativeElapsed);
    Console.WriteLine("Linq : {0}", linqElapsed);
    timer.Stop();
}
// Cancels a previously placed sell order: removes it from the sharded shop
// (when not yet expired), returns the item to the seller's inventory, and
// notifies the seller through a CancelOrderMail. Each stage is timed and
// logged verbosely.
public override IAccountStateDelta Execute(IActionContext context)
{
    var states = context.PreviousStates;
    var shardedShopAddress = ShardedShopStateV2.DeriveAddress(itemSubType, orderId);
    var inventoryAddress = sellerAvatarAddress.Derive(LegacyInventoryKey);
    var worldInformationAddress = sellerAvatarAddress.Derive(LegacyWorldInformationKey);
    var questListAddress = sellerAvatarAddress.Derive(LegacyQuestListKey);
    var digestListAddress = OrderDigestListState.DeriveAddress(sellerAvatarAddress);
    var itemAddress = Addresses.GetItemAddress(tradableId);
    if (context.Rehearsal)
    {
        // Rehearsal: only mark every address this action can touch as changed.
        states = states.SetState(shardedShopAddress, MarkChanged);
        return (states
            .SetState(inventoryAddress, MarkChanged)
            .SetState(worldInformationAddress, MarkChanged)
            .SetState(questListAddress, MarkChanged)
            .SetState(digestListAddress, MarkChanged)
            .SetState(itemAddress, MarkChanged)
            .SetState(sellerAvatarAddress, MarkChanged));
    }

    var addressesHex = GetSignerAndOtherAddressesHex(context, sellerAvatarAddress);

    var sw = new Stopwatch();
    sw.Start();
    var started = DateTimeOffset.UtcNow;
    Log.Verbose("{AddressesHex}Sell Cancel exec started", addressesHex);

    if (!states.TryGetAvatarStateV2(context.Signer, sellerAvatarAddress, out var avatarState, out _))
    {
        throw new FailedLoadStateException(
            $"{addressesHex}Aborted as the avatar state of the seller failed to load.");
    }
    sw.Stop();
    Log.Verbose("{AddressesHex}Sell Cancel Get AgentAvatarStates: {Elapsed}", addressesHex, sw.Elapsed);
    sw.Restart();

    // Shop actions require the avatar to have cleared the unlocking stage.
    if (!avatarState.worldInformation.IsStageCleared(GameConfig.RequireClearedStageLevel.ActionsInShop))
    {
        avatarState.worldInformation.TryGetLastClearedStageId(out var current);
        throw new NotEnoughClearedStageLevelException(addressesHex, GameConfig.RequireClearedStageLevel.ActionsInShop, current);
    }

    if (!states.TryGetState(shardedShopAddress, out BxDictionary shopStateDict))
    {
        throw new FailedLoadStateException($"{addressesHex}failed to load {nameof(ShardedShopStateV2)}({shardedShopAddress}).");
    }
    sw.Stop();
    Log.Verbose("{AddressesHex}Sell Cancel Get ShopState: {Elapsed}", addressesHex, sw.Elapsed);
    sw.Restart();

    avatarState.updatedAt = context.BlockIndex;
    avatarState.blockIndex = context.BlockIndex;

    if (!states.TryGetState(digestListAddress, out Dictionary rawList))
    {
        throw new FailedLoadStateException($"{addressesHex}failed to load {nameof(OrderDigest)}({digestListAddress}).");
    }
    var digestList = new OrderDigestListState(rawList);

    // migration method
    avatarState.inventory.UnlockInvalidSlot(digestList, context.Signer, sellerAvatarAddress);
    avatarState.inventory.ReconfigureFungibleItem(digestList, tradableId);
    avatarState.inventory.LockByReferringToDigestList(digestList, tradableId, context.BlockIndex);
    // digestList.Remove(orderId);

    if (!states.TryGetState(Order.DeriveAddress(orderId), out Dictionary orderDict))
    {
        throw new FailedLoadStateException($"{addressesHex}failed to load {nameof(Order)}({Order.DeriveAddress(orderId)}).");
    }

    Order order = OrderFactory.Deserialize(orderDict);
    order.ValidateCancelOrder(avatarState, tradableId);
    var sellItem = order.Cancel(avatarState, context.BlockIndex);
    // Only remove the order from the shop if it has not already expired out.
    if (context.BlockIndex < order.ExpiredBlockIndex)
    {
        var shardedShopState = new ShardedShopStateV2(shopStateDict);
        shardedShopState.Remove(order, context.BlockIndex);
        states = states.SetState(shardedShopAddress, shardedShopState.Serialize());
    }

    // Drop any pending expiration mail for this order before adding the
    // cancellation mail.
    var expirationMail = avatarState.mailBox.OfType<OrderExpirationMail>()
        .FirstOrDefault(m => m.OrderId.Equals(orderId));
    if (!(expirationMail is null))
    {
        avatarState.mailBox.Remove(expirationMail);
    }

    var mail = new CancelOrderMail(
        context.BlockIndex,
        orderId,
        context.BlockIndex,
        orderId
    );
    avatarState.Update(mail);

    sw.Stop();
    Log.Verbose("{AddressesHex}Sell Cancel Update AvatarState: {Elapsed}", addressesHex, sw.Elapsed);
    sw.Restart();

    states = states
        .SetState(itemAddress, sellItem.Serialize())
        .SetState(digestListAddress, digestList.Serialize())
        .SetState(inventoryAddress, avatarState.inventory.Serialize())
        .SetState(worldInformationAddress, avatarState.worldInformation.Serialize())
        .SetState(questListAddress, avatarState.questList.Serialize())
        .SetState(sellerAvatarAddress, avatarState.SerializeV2());
    sw.Stop();
    Log.Verbose("{AddressesHex}Sell Cancel Set AvatarState: {Elapsed}", addressesHex, sw.Elapsed);
    sw.Restart();
    // NOTE(review): this Restart immediately followed by Stop measures nothing,
    // so the "Set ShopState" elapsed logged below is effectively zero.
    sw.Stop();
    var ended = DateTimeOffset.UtcNow;
    Log.Verbose("{AddressesHex}Sell Cancel Set ShopState: {Elapsed}", addressesHex, sw.Elapsed);
    Log.Verbose("{AddressesHex}Sell Cancel Total Executed Time: {Elapsed}", addressesHex, ended - started);
    return (states);
}
// Streams IMU (accelerometer/gyrometer) readings as JSON over TCP to
// 127.0.0.1:9001, reconnecting forever on failure. When no accelerometer is
// present, synthesized random data points are sent instead. In BUFFER_MODE,
// points are batched into dataPointsBuff before sending.
static void Main(string[] args)
{
    // create a tcp client
    TCPClientIMU.TCPClientIMU client = new TCPClientIMU.TCPClientIMU("127.0.0.1", 9001);
    _accelerometer = Accelerometer.GetDefault(AccelerometerReadingType.Standard);
    _gyrometer = Gyrometer.GetDefault();

    while (true)
    {
        // Blocking connect; a failure later in the loop falls through to the
        // catch below and retries the connection.
        Task.Run(async () => { await client.ConnectAsync(); }).Wait();
        Random rng = new Random();
        Stopwatch stopwatch = new Stopwatch();
        stopwatch.Start();
        if (_accelerometer != null)
        {
            // Real sensor present: poll it on a dedicated background thread.
            Thread IMUDataThread = new Thread(() => IMUDataPoll());
            IMUDataThread.Start();
        }
        try
        {
            Task.Run(async () =>
            {
                while (true)
                {
                    var elapsed = stopwatch.Elapsed;
                    stopwatch.Restart();
                    if (_accelerometer == null)
                    {
                        // No sensor: synthesize a point (aX carries the loop
                        // period in milliseconds, the rest are random).
                        dataPoint = new DataPointViewModel()
                        {
                            aX = elapsed.TotalMilliseconds,
                            aY = rng.NextDouble(),
                            aZ = rng.NextDouble(),
                            gX = rng.NextDouble(),
                            gY = rng.NextDouble(),
                            gZ = rng.NextDouble()
                        };
                    }
                    if (BUFFER_MODE)
                    {
                        // Batch points; bufLock guards against the poll thread.
                        lock (bufLock)
                        {
                            if (dataPoint != null)
                            {
                                dataPointsBuff[bufIndex] = dataPoint;
                                bufIndex++;
                            }
                        }
                        if (bufIndex == BUFFER_SIZE)
                        {
                            var message = JsonConvert.SerializeObject(dataPointsBuff);
                            //Console.WriteLine("Sending message {0}", message);
                            bufIndex = 0;
                            await client.SendMessageToServerTaskAsync(message);
                            if (pktCount == 0)
                            {
                                startTime = nanoTime();
                            }
                            pktCount++;
                            //Debug.Log("updateCount: " + updateCount);
                            // Report effective send frequency every 100 batches.
                            if (pktCount % 100 == 0)
                            {
                                long currentTime = nanoTime();
                                double timeElapsed = (currentTime - startTime) / 1000000000.0f;
                                Console.WriteLine("IMU sending freq: " + 100 / timeElapsed * BUFFER_SIZE);
                                startTime = currentTime;
                            }
                        }
                    }
                    else
                    {
                        if (pktCount == 0)
                        {
                            startTime = nanoTime();
                        }
                        pktCount++;
                        //Debug.Log("updateCount: " + updateCount);
                        // Report packet frequency every 1000 single sends.
                        if (pktCount % 1000 == 0)
                        {
                            long currentTime = nanoTime();
                            double timeElapsed = (currentTime - startTime) / 1000000000.0f;
                            Console.WriteLine("IMU pkt freq: " + 1000 / timeElapsed);
                            startTime = currentTime;
                        }
                        var message = JsonConvert.SerializeObject(dataPoint);
                        //Console.WriteLine("Sending message {0}", message);
                        await client.SendMessageToServerTaskAsync(message);
                    }
                    Thread.Sleep(3);
                }
            }).Wait();
        }
        catch (Exception e)
        {
            Console.WriteLine("Exception: {0}, reconnect...", e);
        }
    }
}
/// <summary>
/// Evaluates the VSOP series for one planet at the given Julian Date.
/// </summary>
/// <param name="Planet">Dataset of a planet</param>
/// <param name="JD">Julian Date</param>
/// <returns>Six variables; for versions other than VSOP87 the last three are
/// the derivatives of the first three, converted to per-day units.</returns>
private double[] Calculate(PlanetTable Planet, double JD)
{
    sw.Restart();

    // Time in thousands of Julian years (365250 days) from J2000.0.
    double phi = (JD - 2451545.0d) / 365250d;

    // Precompute phi^0 .. phi^5.
    double[] t = new double[6];
    for (int i = 0; i < 6; i++)
    {
        t[i] = Math.Pow(phi, i);
    }

    double[] Result = new double[6];
    double u, cu, su;
    double tit; //t[it]
    for (int iv = 0; iv < 6; iv++)
    {
        for (int it = 5; it >= 0; it--)
        {
            tit = t[it];
            if (Planet.variables[iv].PowerTables == null)
            {
                continue;
            }
            if (Planet.variables[iv].PowerTables[it].Terms == null)
            {
                continue;
            }
            foreach (Term term in Planet.variables[iv].PowerTables[it].Terms)
            {
                // Each term contributes A * cos(B + C*phi) * phi^it.
                // Math.SinCos returns (Sin, Cos), so su = sin(u), cu = cos(u).
                u = term.B + term.C * phi;
                (su, cu) = Math.SinCos(u);
                Result[iv] += term.A * cu * tit;

                // Original resolution specification.
                if (Planet.version == VSOPVersion.VSOP87)
                {
                    continue;
                }

                // Derivative of 3 variables:
                // d/dphi [A*cos(u)*phi^it] = it*phi^(it-1)*A*cos(u) - phi^it*A*C*sin(u);
                // the it == 0 branch keeps the (zero) first term explicit to
                // avoid indexing t[-1].
                if (it == 0)
                {
                    Result[iv + 3] += (0 * it * term.A * cu) - (tit * term.A * term.C * su);
                }
                else
                {
                    Result[iv + 3] += (t[it - 1] * it * term.A * cu) - (tit * term.A * term.C * su);
                }
            }
        }
    }

    // Original resolution specification.
    if (Planet.version == VSOPVersion.VSOP87)
    {
        return (Result);
    }

    // Convert derivatives from per-thousand-Julian-years to per-day.
    for (int ic = 0; ic < 3; ic++)
    {
        Result[ic + 3] /= 365250d;
    }

    //Modulo Spherical longitude L into [0,2*pi)
    if (Utility.GetCoordinatesType(Planet.version) == CoordinatesType.Spherical)
    {
        Result[0] = ModuloCircle(Result[0]);
    }

    sw.Stop();
    TimeUsed = sw.Elapsed;
    return (Result);
}
// Runs FindReferences for one entity over every project in the solution,
// accumulates per-EntityType timings/counts, and diffs the found references
// against the expected set in referenceDict, reporting mismatches on the
// console.
void TestFindReferences(IEntity entity)
{
    if (IgnoreEntity(entity))
    {
        return;
    }
    FindReferences fr = new FindReferences();
    fr.FindTypeReferencesEvenIfAliased = true;

    Stopwatch w = new Stopwatch();
    var searchScopes = fr.GetSearchScopes(entity);
    foreach (var project in solution.Projects)
    {
        w.Restart();
        HashSet<AstNode> foundReferences = new HashSet<AstNode>();
        var interestingFiles = new HashSet<CSharpFile>();
        foreach (var searchScope in searchScopes)
        {
            foreach (var unresolvedFile in fr.GetInterestingFiles(searchScope, project.Compilation))
            {
                var file = project.Files.Single(f => f.FileName == unresolvedFile.FileName);
                Debug.Assert(file.UnresolvedTypeSystemForFile == unresolvedFile);

                // Skip file if it doesn't contain the search term
                if (searchScope.SearchTerm != null && file.OriginalText.IndexOf(searchScope.SearchTerm, StringComparison.Ordinal) < 0)
                {
                    continue;
                }

                interestingFiles.Add(file);
            }
        }
        foreach (var file in interestingFiles)
        {
            fr.FindReferencesInFile(searchScopes, file.UnresolvedTypeSystemForFile, file.SyntaxTree, project.Compilation,
                delegate(AstNode node, ResolveResult result)
                {
                    foundReferences.Add(node);
                }, CancellationToken.None);
        }
        w.Stop();
        // Accumulate elapsed search time per entity kind.
        if (timings.ContainsKey(entity.EntityType))
        {
            timings[entity.EntityType] += w.Elapsed;
        }
        else
        {
            timings[entity.EntityType] = w.Elapsed;
        }

        IEntity importedEntity = project.Compilation.Import(entity);

        HashSet<AstNode> expectedReferences;
        if (importedEntity == null || !referenceDict.TryGetValue(importedEntity, out expectedReferences))
        {
            if (foundReferences.Any())
            {
                // There aren't any expected references stored, but we found some references anyways:
                Console.WriteLine();
                Console.WriteLine("Entity not in reference dictionary: " + entity);
            }
            return;
        }
        // Report one example of each kind of mismatch (extra / missing).
        if (foundReferences.Except(expectedReferences).Any())
        {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = foundReferences.Except(expectedReferences).First();
            Console.WriteLine("Found unexpected reference " + n + " (" + n.GetRegion() + ")");
        }
        if (expectedReferences.Except(foundReferences).Any())
        {
            Console.WriteLine();
            Console.WriteLine("Reference mismatch for " + entity + ":");
            var n = expectedReferences.Except(foundReferences).First();
            Console.WriteLine("Did not find expected reference " + n + " (" + n.GetRegion() + ")");
        }
    }
    // Count how many entities of this kind were processed.
    if (entityCount.ContainsKey(entity.EntityType))
    {
        entityCount[entity.EntityType]++;
    }
    else
    {
        entityCount[entity.EntityType] = 1;
    }
}
// Runs a single experiment end to end: generates positive/negative training
// points, synthesizes a model, evaluates it on test points, and persists
// parameters, model, statistics, timings and any exception to the database.
private static void RunExperiment(ExperimentParameters experimentParameters) //, DatabaseContext databaseX)
{
    var database = new DatabaseContext(DatabaseFullPath);
    var version = new Version(DateTime.Now);
    var stoper = new Stopwatch();

    //if (database.Exists(experimentParameters))
    //{
    //    Console.WriteLine("BYLEM WIDZIALEM");
    //    //database.Dispose();
    //    return;
    //}

    database.Insert(version);
    database.Insert(experimentParameters);
    database.Insert(experimentParameters.EvolutionParameters);

    IDictionary<int, EvolutionStep> evolutionSteps = null;
    try
    {
        var enginesFactory = new EnginesFactory();
        var engine = enginesFactory.Create(experimentParameters);
        var distanceCalculator = new CanberraDistanceCalculator();
        var positivePointsGenerator = new PositivePointsGenerator();

        stoper.Restart();
        var positiveTrainingPoints = positivePointsGenerator.GeneratePoints(experimentParameters.NumberOfPositivePoints, engine.Benchmark.Domains, engine.Benchmark.Constraints);
        stoper.Stop();
        engine.Statistics.PositiveTrainingPointsGenerationTime = stoper.Elapsed;

        var negativePointsGenerator = new NegativePointsGenerator(positiveTrainingPoints, distanceCalculator, new NearestNeighbourDistanceCalculator(distanceCalculator));
        //var negativeTrainingPoints = negativePointsGenerator.GeneratePoints(experimentParameters.NumberOfNegativePoints, engine.Benchmark.Domains);

        stoper.Restart();
        var negativeTrainingPoints = negativePointsGenerator.GeneratePoints(experimentParameters.NumberOfNegativePoints, engine.Benchmark.Domains, engine.Benchmark.Constraints);
        stoper.Stop();
        engine.Statistics.NegativeTrainingPointsGenerationTime = stoper.Elapsed;

        //Console.WriteLine("Evolution starts!");
        var trainingPoints = positiveTrainingPoints.Concat(negativeTrainingPoints).ToArray();

        stoper.Restart();
        var mathModel = engine.SynthesizeModel(trainingPoints);
        stoper.Stop();
        engine.Statistics.TotalSynthesisTime = stoper.Elapsed;

        database.Insert(mathModel);

        var testPointsGenerator = new TestPointsGenerator();

        stoper.Restart();
        var testPoints = testPointsGenerator.GeneratePoints(experimentParameters.NumberOfTestPoints, engine.Benchmark.Domains, engine.Benchmark.Constraints);
        stoper.Stop();
        engine.Statistics.TestPointsGenerationTime = stoper.Elapsed;

        var statistics = engine.EvaluateModel(testPoints);
        database.Insert(statistics);
        evolutionSteps = engine.CoreEvolutionSteps;

        //Logger.Instance.Log(experimentParameters);
        //Logger.Instance.Log(mathModel);
        //Logger.Instance.Log(statistics);
        //database.Insert(Logger.Instance.GetLogAsString());
    }
    catch (Exception exception)
    {
        // Failures are persisted rather than rethrown so a batch of
        // experiments keeps running.
        database.Insert(exception);
    }
    database.Save();

    // Optionally dump the tracked evolution steps to a compressed log file.
    if (evolutionSteps != null && experimentParameters.TrackEvolutionSteps)
    {
        Logger.Instance.Log(evolutionSteps);
        var logsFullPath = Path.GetFullPath(LogsDirPath + experimentParameters.GetFileName("Log", ".cmplog"));
        File.WriteAllText(logsFullPath, StringCompressor.CompressString(Logger.Instance.GetLogAsString()));
    }

    //GlobalStoper.Stop();
    //Console.WriteLine(GlobalStoper.ElapsedMilliseconds);
    //Console.ReadKey();

    database.Dispose();
}
// Comparaes the speed of two parsing methods static void SpeedTest() { string openTag = "<upcase>"; string closeTag = "</upcase>"; // Generate random text with some tags string longText = GenerateRandomText(openTag, closeTag); // Check if text can be parsed Console.WriteLine(ParseText(longText, openTag, closeTag) == ParseTextRegex(longText, openTag, closeTag)); Stopwatch sw = new Stopwatch(); int repeat = 10000; // Measure method 1 sw.Restart(); for (int i = 0; i < repeat; i++) { ParseText(longText, openTag, closeTag); } sw.Stop(); Console.WriteLine(sw.ElapsedMilliseconds); // Measure method 2 sw.Restart(); for (int i = 0; i < repeat; i++) { ParseTextRegex(longText, openTag, closeTag); } sw.Stop(); Console.WriteLine(sw.ElapsedMilliseconds); }
/// <summary>
/// Load test: floods the server with single-event read requests for
/// <paramref name="eventStreamId"/> across <paramref name="clientsCnt"/> TCP connections,
/// each driven by its own sender thread, until <paramref name="requestsCnt"/> responses
/// have been received in total. Reports throughput to the log and to TeamCity.
/// </summary>
private void ReadFlood(CommandProcessorContext context, string eventStreamId, int clientsCnt, long requestsCnt)
{
    context.IsAsync();
    var clients = new List<TcpTypedConnection<byte[]>>();
    var threads = new List<Thread>();
    var autoResetEvent = new AutoResetEvent(false);
    // sw2 times each 100k-response window for the periodic throughput trace.
    var sw2 = new Stopwatch();
    // Total responses received across all connections; updated via Interlocked.
    long all = 0;
    for (int i = 0; i < clientsCnt; i++)
    {
        // Split requests evenly; the last client also takes the remainder.
        var count = requestsCnt / clientsCnt + ((i == clientsCnt - 1) ? requestsCnt % clientsCnt : 0);
        // Per-connection counters, shared between the response callback and the
        // sender thread (hence the Interlocked updates below).
        int sent = 0;
        int received = 0;
        var client = context.Client.CreateTcpConnection(
            context,
            (conn, pkg) =>
            {
                // Response callback: runs on the connection's receive path.
                Interlocked.Increment(ref received);
                var localAll = Interlocked.Increment(ref all);
                if (localAll % 1000 == 0)
                {
                    Console.Write(".");
                }
                if (localAll % 100000 == 0)
                {
                    var elapsed = sw2.Elapsed;
                    sw2.Restart();
                    context.Log.Trace("\nDONE TOTAL {0} READS IN {1} ({2:0.0}/s).", localAll, elapsed, 1000.0 * 100000 / elapsed.TotalMilliseconds);
                }
                if (localAll == requestsCnt)
                {
                    // All requests answered — release the main thread.
                    autoResetEvent.Set();
                }
            },
            connectionClosed: (conn, err) =>
            {
                if (all < requestsCnt)
                {
                    context.Fail(null, "Socket was closed, but not all requests were completed.");
                }
                else
                {
                    context.Success();
                }
            });
        client.ConnectionClosed += (_, __) => context.Log.Debug("READS sent: {0}, received: {1}", sent, received);
        clients.Add(client);
        threads.Add(new Thread(() =>
        {
            for (int j = 0; j < count; ++j)
            {
                var corrid = Guid.NewGuid();
                var read = new ClientMessageDto.ReadEvent(eventStreamId, 0, resolveLinkTos: false);
                var package = new TcpPackage(TcpCommand.ReadEvent, corrid, read.Serialize());
                client.EnqueueSend(package.AsByteArray());
                Interlocked.Increment(ref sent);
                // Throttle: keep at most ReadWindow requests in flight per connection.
                while (sent - received > context.Client.Options.ReadWindow)
                {
                    Thread.Sleep(1);
                }
            }
        }));
    }
    // sw times the whole run; sw2 starts now so the first 100k window is measured too.
    var sw = Stopwatch.StartNew();
    sw2.Start();
    foreach (var thread in threads)
    {
        thread.IsBackground = true;
        thread.Start();
    }
    // Wait until the response callback has counted requestsCnt responses.
    autoResetEvent.WaitOne();
    sw.Stop();
    foreach (var client in clients)
    {
        client.Close();
    }
    context.Log.Info("Completed. READS done: {0}.", all);
    var reqPerSec = (requestsCnt + 0.0) / sw.ElapsedMilliseconds * 1000;
    context.Log.Info("{0} requests completed in {1}ms ({2:0.00} reqs per sec).", requestsCnt, sw.ElapsedMilliseconds, reqPerSec);
    PerfUtils.LogData(
        Keyword,
        PerfUtils.Row(PerfUtils.Col("clientsCnt", clientsCnt),
                      PerfUtils.Col("requestsCnt", requestsCnt),
                      PerfUtils.Col("ElapsedMilliseconds", sw.ElapsedMilliseconds)),
        PerfUtils.Row(PerfUtils.Col("readsCnt", all))
    );
    PerfUtils.LogTeamCityGraphData(string.Format("{0}-{1}-{2}-reqPerSec", Keyword, clientsCnt, requestsCnt), (int)reqPerSec);
    context.Success();
}
/// <summary>
/// Fetches images from e621 matching <paramref name="tags"/> and posts them as embeds.
/// Honors per-user options (blacklist, source/tag display), a per-channel 200 s cooldown
/// outside bot channels, an image-count argument (capped at 5, bot channels only), and
/// special tags &lt;getcount&gt; / &lt;getid&gt;. Only the first 6 tags go to the query;
/// extra tags are applied as a local post-filter.
/// </summary>
/// <param name="tags">Search tags, optionally including a count and special tags.</param>
public async Task E6Task(params string[] tags)
{
    IDisposable typingDisposable = this.Context.Channel.EnterTypingState();
    List<ESixImage> selectedImages = new List<ESixImage>();
    // Tracks the image being posted so the catch block can report its id.
    ESixImage currentJObject = null;
    Stopwatch e6Stopwatch = null;
    UserOptions options;
    bool getJsonId = tags.Any(e => e.ToLower().Contains("<getid>"));
    try
    {
        // Load the per-user options file, creating it with defaults on first use.
        if (!File.Exists(Path.Combine(Program.AppPath, "e6options", $"{this.Context.User.Id}.json")))
        {
            options = new UserOptions
            {
                Id = this.Context.User.Id,
                DisplaySources = false,
                DisplayTags = false,
                BlackList = new List<string> { "scat", "gore" }
            };
            File.WriteAllText(Path.Combine(Program.AppPath, "e6options", $"{this.Context.User.Id}.json"), JsonConvert.SerializeObject(options));
        }
        else
        {
            options = JsonConvert.DeserializeObject<UserOptions>(File.ReadAllText(Path.Combine(Program.AppPath, "e6options", $"{this.Context.User.Id}.json")));
        }
        // Per-channel 200 s cooldown; skipped in bot channels, DMs, and one whitelisted channel.
        if (!this.Context.Channel.Name.ToLower().Contains("bot") && !(this.Context.Channel is IDMChannel) && !(this.Context.Channel is SocketDMChannel) && this.Context.Channel.Id != 344718149398036490)
        {
            if (e6Stopwatches.TryGetValue(this.Context.Channel.Id, out e6Stopwatch) && e6Stopwatch.IsRunning && e6Stopwatch.ElapsedMilliseconds < 200000)
            {
                // FIX: Elapsed.Seconds is only the 0-59 seconds component of the TimeSpan,
                // which under-reports the wait once more than a minute has elapsed against
                // this 200 s cooldown. TotalSeconds gives the true remaining time.
                await this.ReplyAsync($"Please wait {200 - (int)e6Stopwatch.Elapsed.TotalSeconds} seconds until using this command.");
                return;
            }
            if (!e6Stopwatches.ContainsKey(this.Context.Channel.Id))
            {
                e6Stopwatches.Add(this.Context.Channel.Id, new Stopwatch());
                e6Stopwatches.TryGetValue(this.Context.Channel.Id, out e6Stopwatch);
            }
        }
        bool getNumberOfImages = tags.Any(e => e.ToLower().Contains("<getcount>"));
        int requestedNumber = 1;
        List<string> forcedTags = tags.ToList();
        // A bare number among the tags requests that many images (max 5), bot channels only.
        foreach (string forcedTag in forcedTags)
        {
            if (int.TryParse(forcedTag, out int result))
            {
                if (this.Context.Channel.Name.ToLower().Contains("bot") || this.Context.Channel.Id == 344718149398036490)
                {
                    requestedNumber = result <= 5 ? result : 5;
                }
                else
                {
                    await this.ReplyAsync("Keep image spam to bot channels. The image count has been set to 1.");
                }
            }
        }
        // Strip control tokens and numeric counts so only real search tags remain.
        forcedTags.RemoveAll(e => e.ToLower() == "<getcount>" || e.ToLower() == "<getid>" || int.TryParse(e, out int _));
        await this.Context.Channel.TriggerTypingAsync();
        // e621 queries are limited to 6 tags; the surplus is filtered locally below.
        List<string> exceededTags = null;
        if (forcedTags.Count > 6)
        {
            exceededTags = forcedTags.Skip(6).ToList();
            forcedTags = forcedTags.Take(6).ToList();
        }
        // Fetch result pages 1-5 concurrently into the shared e621ImageList.
        List<Task> taskList = new List<Task>();
        for (int i = 1; i < 6; i++)
        {
            taskList.Add(GetJson(i, forcedTags));
        }
        await Task.WhenAll(taskList);
        // Filter: NSFW rating (outside NSFW channels), unsupported file types,
        // blacklisted tags, and the locally-applied surplus tags.
        e621ImageList.RemoveAll(e =>
        {
            bool shouldDelete = false;
            if (!this.Context.Channel.IsNsfw)
            {
                shouldDelete = e.Rating != ESixImage.E621Rating.Safe;
            }
            if (shouldDelete)
            {
                return true;
            }
            string extension = e.FileExtension;
            shouldDelete = extension != "png" && extension != "jpg" && extension != "jpeg" && extension != "gif";
            if (shouldDelete)
            {
                return true;
            }
            if (e.Tags.Any(f => options.BlackList.Select(g => g.ToLower()).Contains(f.ToLower())))
            {
                return true;
            }
            if (exceededTags != null)
            {
                // Negated surplus tags ("-tag") exclude matching images;
                // note this returns on the first "-" tag encountered (preserved behavior).
                foreach (string exceededTag in exceededTags)
                {
                    if (!exceededTag.StartsWith("-"))
                    {
                        continue;
                    }
                    return e.Tags.Any(f => string.Equals(f, exceededTag.TrimStart('-'), StringComparison.CurrentCultureIgnoreCase));
                }
                // Positive surplus tags must all be present on the image.
                return exceededTags.Any(exceededTag => !e.Tags.Any(f => string.Equals(f, exceededTag, StringComparison.CurrentCultureIgnoreCase)));
            }
            return false;
        });
        if (e621ImageList.Count == 0)
        {
            await this.ReplyAsync("Couldn't find an image with those tags.");
            return;
        }
        if (getNumberOfImages)
        {
            await this.ReplyAsync($"Counted {e621ImageList.Count} images. Please note that this command enforces a limit of query pages, which is then filtered to remove blacklist items and unsupported filetypes.");
            return;
        }
        // Pick the requested number of distinct random images.
        requestedNumber = requestedNumber > e621ImageList.Count ? e621ImageList.Count : requestedNumber;
        for (int i = 0; i < requestedNumber; i++)
        {
            int indexOfImage = Globals.Random.Next(0, e621ImageList.Count);
            selectedImages.Add(e621ImageList[indexOfImage]);
            e621ImageList.RemoveAt(indexOfImage);
        }
        foreach (ESixImage selectedImage in selectedImages)
        {
            currentJObject = selectedImage;
            string ext = selectedImage.FileExtension;
            string url = selectedImage.ImageUrl ?? throw new Exception("Couldn't find an image with those tags.");
            switch (ext)
            {
                case "jpg":
                case "jpeg":
                case "gif":
                case "png":
                    break;
                default:
                    // Non-embeddable format (webm/mp4/anything else): re-roll by
                    // recursing with the same tags. (The three original arms were identical.)
                    await this.E6Task(tags);
                    return;
            }
            EmbedBuilder builder = new EmbedBuilder();
            if (options.DisplaySources)
            {
                string[] sources = selectedImage.Sources ?? new[] { "No sources have been given for this image." };
                builder.Fields.Add(new EmbedFieldBuilder { IsInline = true, Name = "Sources", Value = sources.Aggregate((d, g) => $"{d}\n{g}") });
            }
            if (options.DisplayTags)
            {
                // Embed field values are capped at 1024 chars, so long tag lists are split
                // across continuation fields.
                string allTags = string.Join(", ", selectedImage.Tags);
                List<string> excessTags = new List<string>();
                if (allTags.Length > 1024)
                {
                    excessTags.AddRange(this.SplitToLists(selectedImage.Tags, 1024, 2).Select(enumerable => String.Join(", ", enumerable)));
                    allTags = excessTags.First();
                    excessTags.RemoveAt(0);
                }
                builder.Fields.Add(new EmbedFieldBuilder { IsInline = true, Name = "Tags", Value = allTags });
                foreach (string t in excessTags)
                {
                    builder.Fields.Add(new EmbedFieldBuilder { IsInline = true, Name = "Tags (cont.)", Value = t });
                }
            }
            string artists = selectedImage.Artists != null ? string.Join(", ", selectedImage.Artists) : "Unknown";
            builder.Author = new EmbedAuthorBuilder { IconUrl = "http://i.imgur.com/3ngaS8h.png", Name = $"#{selectedImage.Id}: {artists}", Url = $"https://e621.net/post/show/{selectedImage.Id}" };
            builder.ImageUrl = url;
            builder.Description = $"Score: {selectedImage.Score}\nFavorites: {selectedImage.FavoriteCount}";
            // Embed accent color mirrors the e621 rating.
            System.Drawing.Color embedColor = System.Drawing.Color.Aquamarine;
            switch (selectedImage.Rating)
            {
                case ESixImage.E621Rating.Safe:
                    embedColor = System.Drawing.Color.Green;
                    break;
                case ESixImage.E621Rating.Questionable:
                    embedColor = System.Drawing.Color.Yellow;
                    break;
                case ESixImage.E621Rating.Explict:
                    embedColor = Color.Red;
                    break;
                default:
                    break;
            }
            builder.WithColor(embedColor);
            await this.ReplyAsync(string.Empty, false, builder.Build());
        }
        // Start/refresh the cooldown only after a successful post.
        e6Stopwatch?.Restart();
    }
    catch (Exception e)
    {
        ConsoleHelper.WriteLine(e);
        if (currentJObject == null)
        {
            await this.ReplyAsync("JObject was null!");
            return;
        }
        if (getJsonId)
        {
            await this.ReplyAsync("Image with id \"" + currentJObject.Id + "\" failed to send.");
        }
    }
    finally
    {
        // Always stop the typing indicator and clear the shared result list.
        typingDisposable.Dispose();
        e621ImageList.Clear();
    }
}
protected override void OnBeginFrame() { _stopwatch.Start(); _stopwatch2.Restart(); //Win32.MakeCurrent(_hdc, _hglrc); }
private void CompareData(IEnumerable <object> fileData, IEnumerable <object> dbData) { Type RecordType = fileData.ElementAt(0).GetType(); System.Diagnostics.Debug.WriteLine("Distinct File Count: " + fileData.Count()); IEnumerable <PropertyInfo> KeyProperties = (from prop in RecordType.GetProperties() from att in prop.CustomAttributes where att.AttributeType == typeof(KeyAttribute) select prop); IEnumerable <PropertyInfo> PropertiesToChagne = from pro in RecordType.GetProperties() from not in KeyProperties where pro.Name != not.Name select pro; Stopwatch SW = new Stopwatch(); using (CHUMDB context = new CHUMDB()) { ILog Logger = ServiceLocator.Current.GetInstance <ILog>(); List <object> RangeAdd = new List <object>(); int count = 1; foreach (object fileRecord in fileData) { if ((count % 1000) == 0) { Logger.LogMessage("Processed: " + RecordType.Name + "\tRecord: " + count + "/" + fileData.Count() + " in: " + SW.ElapsedMilliseconds + " ms"); SW.Restart(); } //CHEKING db object Record = GetRecordFromDB(fileRecord, dbData); if (Record == null) { object[] Propvales = KeyProperties.Select(i => i.GetValue(fileRecord)).ToArray(); if (Propvales.Where(i => string.IsNullOrWhiteSpace(i.ToString())).Count() == 0) { RangeAdd.Add(fileRecord); } } else { object[] Propvales = KeyProperties.Select(i => i.GetValue(Record)).ToArray(); object DBRecord = context.Set(Record.GetType()).Find(Propvales); foreach (PropertyInfo prop in PropertiesToChagne) { if (prop.GetValue(DBRecord) != prop.GetValue(fileRecord)) { prop.SetValue(DBRecord, prop.GetValue(fileRecord)); } } } count++; } System.Diagnostics.Debug.WriteLine("Adding range data for: " + RecordType.Name); context.Set(RecordType).AddRange(RangeAdd); System.Diagnostics.Debug.WriteLine("Adding range data Done"); Logger.LogMessage("Saving Data"); Logger.LogMessage("Data Saved: " + context.SaveChanges()); } }
public async Task DoctorMarkov() { try { Stopwatch markovStopwatch = null; if (!this.Context.Channel.Name.Contains("bot") && markovStopwatches.TryGetValue(this.Context.Channel.Id, out markovStopwatch) && markovStopwatch.IsRunning && markovStopwatch.ElapsedMilliseconds < 60000) { await this.ReplyAsync($"Please wait {60 - markovStopwatch.Elapsed.Seconds} seconds until using this command."); return; } if (!markovStopwatches.ContainsKey(this.Context.Channel.Id) && !this.Context.Channel.Name.Contains("bot")) { markovStopwatches.Add(this.Context.Channel.Id, new Stopwatch()); markovStopwatches.TryGetValue(this.Context.Channel.Id, out markovStopwatch); } File.WriteAllText(Path.Combine(Program.AppPath, "Markovs", "fileLoc.txt"), "dontblink.txt"); ProcessStartInfo info = new ProcessStartInfo { FileName = "py", Arguments = $"-3 \"{Path.Combine(Program.AppPath, "Markovs", "markovNewline.py")}\"", UseShellExecute = false, RedirectStandardOutput = true }; Process markovProcess = Process.Start(info); string output = markovProcess.StandardOutput.ReadToEnd(); Console.Out.WriteLine($"[python-output@{DateTime.Now.ToLongTimeString()}]: {output}"); while (!File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt"))) { } if (string.IsNullOrWhiteSpace(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt")))) { await this.ReplyAsync("Markov creation failed!"); } else { await this.ReplyAsync(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt"))); markovStopwatch?.Restart(); } } catch (Exception e) { Console.WriteLine(e); if (!File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt")) || string.IsNullOrWhiteSpace(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt")))) { await this.ReplyAsync("Markov creation failed!"); } } finally { if (File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt"))) { File.Delete(Path.Combine(Program.AppPath, "Markovs", "output.txt")); } } }
public void ResetDeltaTime() { _sw?.Restart(); _previousFrameTicks = 0L; }
/// <summary>
/// Manages updating all entities on the world.
/// - Server-side command-line commands are handled in their own thread.
/// - Database I/O is handled in its own thread.
/// - Network commands come from their own listener threads, and are queued for each sessions which are then processed here.
/// - This thread does the rest of the work!
/// </summary>
private static void UpdateWorld()
{
    log.DebugFormat("Starting UpdateWorld thread");
    WorldActive = true;
    // Times each pass of the main loop; the elapsed wall-clock time of every tick
    // is accumulated into Timers.PortalYearTicks at the bottom of the loop.
    var worldTickTimer = new Stopwatch();
    while (!pendingWorldStop)
    {
        /*
         * When it comes to thread safety for Landblocks and WorldObjects, ACE makes the following assumptions:
         *
         * Inbound ClientMessages and GameActions are handled on the main UpdateWorld thread.
         * - These actions may load Landblocks and modify other WorldObjects safely.
         *
         * PlayerEnterWorld queue is run on the main UpdateWorld thread.
         * - These actions may load Landblocks and modify other WorldObjects safely.
         *
         * Landblock Groups (calculated by LandblockManager) can be processed in parallel.
         * - Adjacent Landblocks will always be run on the same thread.
         * - Non-adjacent landblocks might be run on different threads.
         *   - If two non-adjacent landblocks both touch the same landblock, and that landblock is active, they will be run on the same thread.
         *
         * Database results are returned from a task spawned in SerializedShardDatabase (via callback).
         * - Minimal processing should be done from the callback. Return as quickly as possible to let the database thread do database work.
         * - The processing of these results should be queued to an ActionQueue
         *
         * The only cases where it's acceptable for to create a new Task, Thread or Parallel loop are the following:
         * - Every scenario must be one where you don't care about breaking ACE
         * - DeveloperCommand Handlers
         */
        worldTickTimer.Restart();

        // Each subsystem tick is bracketed by the performance monitor so per-phase
        // timings show up in the server metrics.
        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.PlayerManager_Tick);
        PlayerManager.Tick();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.PlayerManager_Tick);

        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.NetworkManager_InboundClientMessageQueueRun);
        NetworkManager.InboundMessageQueue.RunActions();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.NetworkManager_InboundClientMessageQueueRun);

        // This will consist of PlayerEnterWorld actions, as well as other game world actions that require thread safety
        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.actionQueue_RunActions);
        actionQueue.RunActions();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.actionQueue_RunActions);

        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.DelayManager_RunActions);
        DelayManager.RunActions();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.DelayManager_RunActions);

        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.UpdateGameWorld);
        var gameWorldUpdated = UpdateGameWorld();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.UpdateGameWorld);

        ServerPerformanceMonitor.RestartEvent(ServerPerformanceMonitor.MonitorType.NetworkManager_DoSessionWork);
        int sessionCount = NetworkManager.DoSessionWork();
        ServerPerformanceMonitor.RegisterEventEnd(ServerPerformanceMonitor.MonitorType.NetworkManager_DoSessionWork);

        ServerPerformanceMonitor.Tick();

        // We only relax the CPU if our game world is able to update at the target rate.
        // We do not sleep if our game world just updated. This is to prevent the scenario where our game world can't keep up. We don't want to add further delays.
        // If our game world is able to keep up, it will not be updated on most ticks. It's on those ticks (between updates) that we will relax the CPU.
        if (!gameWorldUpdated)
        {
            Thread.Sleep(sessionCount == 0 ? 10 : 1); // Relax the CPU more if no sessions are connected
        }

        Timers.PortalYearTicks += worldTickTimer.Elapsed.TotalSeconds;
    }
    // World has finished operations and concedes the thread to garbage collection
    WorldActive = false;
}
static void DrawScreen(string[,] screen, int[,] projectiles, int[] oPos, Stopwatch sw) { for (int i = 0; i < 25; i++) { for (int j = 0; j < 55; j++) { if (oPos[0] == i && oPos[1] == j) { if (oPos[2] == 1) { Console.WriteLine("X"); sw.Stop(); Console.Write("Your time: {0}", sw.Elapsed); Thread.Sleep(5000); Console.Clear(); sw.Restart(); oPos[2] = 0; WipeProjectiles(projectiles); } else { Console.Write("O"); } } else if (projectiles[i, j] == 1) { Console.Write(RandomProjectile()); } else Console.Write(screen[i, j]); } if (oPos[0] == i) { } else { Console.WriteLine(); } } }
/// <summary>
/// CLI entry point: builds the System.CommandLine option tree for MSLA/DLP file
/// analysis/repair/conversion, wires the handler, and invokes it with a hard-coded
/// test command line (the real args invocation is commented out below).
/// </summary>
public static async Task<int> Main(params string[] args)
{
    // Fixed culture so numeric/date formatting is stable regardless of host locale.
    Thread.CurrentThread.CurrentCulture = CultureInfo.CreateSpecificCulture("en-GB");
    OperationProgress progress = new OperationProgress();
    Stopwatch sw = new Stopwatch();
    uint count = 0;
    var rootCommand = new RootCommand("MSLA/DLP, file analysis, repair, conversion and manipulation")
    {
        new Option(new [] { "-f", "--file" }, "Input file to read")
        {
            IsRequired = true,
            Argument = new Argument<FileSystemInfo>("filepath").ExistingOnly()
        },
        new Option(new [] { "-o", "--output" }, "Output file to save the modifications, if aware, it saves to the same input file")
        {
            Argument = new Argument<FileSystemInfo>("filepath")
        },
        new Option(new [] { "-e", "--extract" }, "Extract file content to a folder")
        {
            Argument = new Argument<DirectoryInfo>("folder")
        },
        new Option(new [] { "-c", "--convert" }, "Converts input into a output file format by it extension")
        {
            Argument = new Argument<FileSystemInfo>("filepath"),
        },
        new Option(new [] { "-p", "--properties" }, "Print a list of all properties/settings"),
        new Option(new [] { "-gcode" }, "Print the GCode if available"),
        new Option(new [] { "-i", "--issues" }, "Compute and print a list of all issues"),
        new Option(new [] { "-r", "--repair" }, "Attempt to repair all issues")
        {
            Argument = new Argument<int[]>("[start layer index] [end layer index] [islands 0/1] [remove empty layers 0/1] [resin traps 0/1]"),
        },
        // The mutation options below are declared but not yet consumed by the handler.
        new Option(new [] { "-mr", "--mut-resize" }, "Resizes layer images in a X and/or Y factor, starting from 100% value")
        {
            Argument = new Argument<decimal[]>("[x%] [y%] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-ms", "--mut-solidify" }, "Closes all inner holes")
        {
            Argument = new Argument<uint[]>("[start layer index] [end layer index]")
        },
        new Option(new [] { "-me", "--mut-erode" }, "Erodes away the boundaries of foreground object")
        {
            Argument = new Argument<uint[]>("[start iterations] [end iterations] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-md", "--mut-dilate" }, "It is just opposite of erosion")
        {
            Argument = new Argument<uint[]>("[start iterations] [end iterations] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-mc", "--mut-close" }, "Dilation followed by Erosion")
        {
            Argument = new Argument<uint[]>("[start iterations] [end iterations] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-mo", "--mut-open" }, "Erosion followed by Dilation")
        {
            Argument = new Argument<uint[]>("[start iterations] [end iterations] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-mg", "--mut-gradient" }, "The difference between dilation and erosion of an image")
        {
            Argument = new Argument<uint[]>("[kernel size] [start layer index] [end layer index] [fade 0/1]")
        },
        new Option(new [] { "-mpy", "--mut-py" }, "Performs down-sampling step of Gaussian pyramid decomposition")
        {
            Argument = new Argument<uint[]>("[start layer index] [end layer index]")
        },
        new Option(new [] { "-mgb", "--mut-gaussian-blur" }, "Each pixel is a sum of fractions of each pixel in its neighborhood")
        {
            Argument = new Argument<ushort[]>("[aperture] [sigmaX] [sigmaY]")
        },
        new Option(new [] { "-mmb", "--mut-median-blur" }, "Each pixel becomes the median of its surrounding pixels")
        {
            Argument = new Argument<ushort>("[aperture]")
        },
    };
    rootCommand.Handler = CommandHandler.Create(
        (
            FileSystemInfo file,
            FileSystemInfo convert,
            DirectoryInfo extract,
            bool properties,
            bool gcode,
            bool issues,
            int[] repair
            //decimal[] mutResize
        ) =>
        {
            var fileFormat = FileFormat.FindByExtension(file.FullName, true, true);
            // NOTE(review): when fileFormat is null only an error is printed; the
            // handler then falls through, and the extract/issues/convert/repair
            // branches below would dereference the null fileFormat — confirm intended.
            if (ReferenceEquals(fileFormat, null))
            {
                Console.WriteLine($"Error: {file.FullName} is not a known nor valid format.");
            }
            else
            {
                // Decode the input file and print its headline properties.
                Console.Write($"Reading: {file}");
                sw.Restart();
                fileFormat.Decode(file.FullName, progress);
                sw.Stop();
                Console.WriteLine($", in {sw.ElapsedMilliseconds}ms");
                Console.WriteLine("----------------------");
                Console.WriteLine($"Layers: {fileFormat.LayerCount} x {fileFormat.LayerHeight}mm = {fileFormat.TotalHeight}mm");
                Console.WriteLine($"Resolution: {new Size((int) fileFormat.ResolutionX, (int) fileFormat.ResolutionY)}");
                Console.WriteLine($"AntiAlias: {fileFormat.ValidateAntiAliasingLevel()}");
                Console.WriteLine($"Bottom Layer Count: {fileFormat.BottomLayerCount}");
                Console.WriteLine($"Bottom Exposure Time: {fileFormat.BottomExposureTime}s");
                Console.WriteLine($"Layer Exposure Time: {fileFormat.ExposureTime}s");
                Console.WriteLine($"Print Time: {fileFormat.PrintTime}s");
                Console.WriteLine($"Cost: {fileFormat.MaterialCost}$");
                Console.WriteLine($"Resin Name: {fileFormat.MaterialName}");
                Console.WriteLine($"Machine Name: {fileFormat.MachineName}");
                Console.WriteLine($"Thumbnails: {fileFormat.CreatedThumbnailsCount}");
                Console.WriteLine("----------------------");
            }
            if (!ReferenceEquals(extract, null))
            {
                Console.Write($"Extracting to {extract.FullName}");
                sw.Restart();
                fileFormat.Extract(extract.FullName, true, true, progress);
                sw.Stop();
                Console.WriteLine($", finished in {sw.ElapsedMilliseconds}ms");
            }
            if (properties)
            {
                // Dump every public instance property of every config section via reflection.
                count = 0;
                Console.WriteLine("Listing all properties:");
                Console.WriteLine("----------------------");
                foreach (var config in fileFormat.Configs)
                {
                    Console.WriteLine("******************************");
                    Console.WriteLine($"\t{config.GetType().Name}");
                    Console.WriteLine("******************************");
                    foreach (PropertyInfo propertyInfo in config.GetType().GetProperties(BindingFlags.Public | BindingFlags.Instance))
                    {
                        count++;
                        // Skip the indexer pseudo-property.
                        if (propertyInfo.Name.Equals("Item"))
                        {
                            continue;
                        }
                        Console.WriteLine($"{propertyInfo.Name}: {propertyInfo.GetValue(config)}");
                    }
                }
                Console.WriteLine("----------------------");
                Console.WriteLine($"Total properties: {count}");
            }
            if (gcode)
            {
                if (ReferenceEquals(fileFormat.GCode, null))
                {
                    Console.WriteLine("No GCode available");
                }
                else
                {
                    Console.WriteLine("----------------------");
                    Console.WriteLine(fileFormat.GCode);
                    Console.WriteLine("----------------------");
                    Console.WriteLine($"Total lines: {fileFormat.GCode.Length}");
                }
            }
            if (issues)
            {
                Console.WriteLine("Computing Issues, please wait.");
                sw.Restart();
                var issueList = fileFormat.LayerManager.GetAllIssues(null, null, null, null, true, null, progress);
                sw.Stop();
                Console.WriteLine("Issues:");
                Console.WriteLine("----------------------");
                count = 0;
                foreach (var issue in issueList)
                {
                    Console.WriteLine(issue);
                    count++;
                }
                /*for (uint layerIndex = 0; layerIndex < fileFormat.LayerCount; layerIndex++)
                 * {
                 *     if(!issuesDict.TryGetValue(layerIndex, out var list)) continue;
                 *     foreach (var issue in list)
                 *     {
                 *         Console.WriteLine(issue);
                 *         count++;
                 *     }
                 * }*/
                Console.WriteLine("----------------------");
                Console.WriteLine($"Total Issues: {count} in {sw.ElapsedMilliseconds}ms");
            }
            if (!ReferenceEquals(convert, null))
            {
                var fileConvert = FileFormat.FindByExtension(convert.FullName, true, true);
                // NOTE(review): this null-checks fileFormat (the input) instead of
                // fileConvert (the just-looked-up target format) — looks like a
                // copy-paste slip; a null fileConvert would throw below.
                if (ReferenceEquals(fileFormat, null))
                {
                    Console.WriteLine($"Error: {convert.FullName} is not a known nor valid format.");
                }
                else
                {
                    Console.WriteLine($"Converting {fileFormat.GetType().Name} to {fileConvert.GetType().Name}: {convert.Name}");
                    try
                    {
                        sw.Restart();
                        fileFormat.Convert(fileConvert, convert.FullName, progress);
                        sw.Stop();
                        Console.WriteLine($"Convertion done in {sw.ElapsedMilliseconds}ms");
                    }
                    catch (Exception e)
                    {
                        Console.WriteLine(e);
                    }
                }
            }
            if (!ReferenceEquals(repair, null))
            {
                uint layerStartIndex = (uint)(repair.Length >= 1 ? Math.Max(0, repair[0]) : 0);
                uint layerEndIndex = repair.Length >= 2 ? (uint)repair[1].Clamp(0, (int)(fileFormat.LayerCount - 1)) : fileFormat.LayerCount - 1;
                // NOTE(review): "x > 0 || x < 0" is just "x != 0" — any non-zero flag
                // (including negatives) enables the corresponding repair; absent flags default to enabled.
                bool repairIslands = repair.Length < 3 || repair[2] > 0 || repair[2] < 0;
                bool removeEmptyLayers = repair.Length < 4 || repair[3] > 0 || repair[3] < 0;
                bool repairResinTraps = repair.Length < 5 || repair[4] > 0 || repair[4] < 0;
                //fileFormat.LayerManager.RepairLayers(layerStartIndex, layerEndIndex, 2, 1, 4, repairIslands, removeEmptyLayers, repairResinTraps, null, progress);
            }
        });
    //await rootCommand.InvokeAsync(args);
    // Hard-coded debug invocation; restore the line above for production use.
    await rootCommand.InvokeAsync("-f body_Tough0.1mm_SL1_5h16m_HOLLOW_DRAIN.sl1 -r -1");
    return(1);
}
/// <summary>
/// Generates a markov-chain message from the cached messages of the mentioned users
/// (via an external Python script) and posts it, with user mentions in the output
/// rewritten to bracketed display names. Rate-limited to one use per 60 s per channel,
/// except in channels whose name contains "bot".
/// </summary>
/// <param name="users">Users whose cached messages seed the markov corpus.</param>
public async Task MultiMarkov(params IUser[] users)
{
    try
    {
        // NOTE(review): requestedUsersIds (mentioned users + the caller) is built but
        // never used — the corpus filter below uses `users` only, so the caller's own
        // messages are not included. Confirm which behavior is intended.
        List<ulong> requestedUsersIds = users.Select(e => e.Id).ToList();
        requestedUsersIds.Add(this.Context.User.Id);
        Stopwatch markovStopwatch = null;
        // Per-channel 60 s cooldown. (Elapsed.Seconds is safe here because the guard
        // caps elapsed time below one minute.)
        if (!this.Context.Channel.Name.Contains("bot") && markovStopwatches.TryGetValue(this.Context.Channel.Id, out markovStopwatch) && markovStopwatch.IsRunning && markovStopwatch.ElapsedMilliseconds < 60000)
        {
            await this.ReplyAsync($"Please wait {60 - markovStopwatch.Elapsed.Seconds} seconds until using this command.");
            return;
        }
        if (!markovStopwatches.ContainsKey(this.Context.Channel.Id) && !this.Context.Channel.Name.Contains("bot"))
        {
            markovStopwatches.Add(this.Context.Channel.Id, new Stopwatch());
            markovStopwatches.TryGetValue(this.Context.Channel.Id, out markovStopwatch);
        }
        // Bail out silently if this guild has no message logs at all.
        if (!Directory.Exists(Path.Combine(Program.AppPath, "Logs", this.Context.Guild.Name)))
        {
            return;
        }
        // Filter the global message cache down to the mentioned users and write the
        // corpus + pointer files consumed by the python script.
        Dictionary<ulong, IMessage> allCachedMessages = Program.LogMessages;
        allCachedMessages = allCachedMessages.Where(e => users.Any(f => f.Id == e.Value.Author.Id)).ToDictionary(e => e.Key, e => e.Value);
        File.WriteAllLines(Path.Combine(Program.AppPath, "Markovs", "currentmulti.txt"), allCachedMessages.Values.Select(e => e.Content));
        File.WriteAllText(Path.Combine(Program.AppPath, "Markovs", "fileLoc.txt"), "currentmulti.txt");
        ProcessStartInfo info = new ProcessStartInfo
        {
            FileName = "py",
            Arguments = $"-3 \"{Path.Combine(Program.AppPath, "Markovs", "markovNewline.py")}\"",
            UseShellExecute = false,
            RedirectStandardOutput = true
        };
        Process markovProcess = Process.Start(info);
        // Null-conditional: Process.Start may return null if no process was started.
        string output = markovProcess?.StandardOutput.ReadToEnd();
        Console.Out.WriteLine($"[python-output@{DateTime.Now.ToLongTimeString()}]: {output}");
        // NOTE(review): busy-wait polls for the script's output file with no delay,
        // pinning a CPU core until the file appears — consider an awaited delay.
        while (!File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt"))) { }
        if (string.IsNullOrWhiteSpace(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt"))))
        {
            await this.ReplyAsync("Markov creation failed!");
        }
        else
        {
            // Rewrite raw <@id> / <@!id> mentions into "[Nickname]" (or "[Username]"
            // when no nickname) so the bot's output cannot ping people.
            // NOTE(review): .Result blocks inside the async method (the Regex.Replace
            // evaluator cannot await) — potential deadlock risk on some schedulers.
            await this.ReplyAsync(Regex.Replace(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt")), @"<@[^\s]*([0-9]+)>", e =>
            {
                IGuildUser user = this.Context.Guild.GetUserAsync(Convert.ToUInt64(e.Value.Replace("@", string.Empty).Replace("!", string.Empty).Replace(">", string.Empty).Replace("<", string.Empty))).Result;
                return(string.IsNullOrWhiteSpace(user.Nickname) ? $"[{user.Username}]" : $"[{user.Nickname}]");
            }));
            // Start/refresh the cooldown only on success.
            markovStopwatch?.Restart();
        }
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        if (!File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt")) || string.IsNullOrWhiteSpace(File.ReadAllText(Path.Combine(Program.AppPath, "Markovs", "output.txt"))))
        {
            await this.ReplyAsync("Markov creation failed!");
        }
    }
    finally
    {
        // Always remove the output file so the next invocation's poll loop cannot
        // pick up a stale result.
        if (File.Exists(Path.Combine(Program.AppPath, "Markovs", "output.txt")))
        {
            File.Delete(Path.Combine(Program.AppPath, "Markovs", "output.txt"));
        }
    }
}
/// <summary>
/// Demo entry point for the SoftUni database exercises: exercises the DAO
/// layer (Problem 1), runs several LINQ search queries (Problem 3), compares
/// a LINQ query against a native SQL query (Problem 4), demonstrates
/// concurrent updates from two contexts (Problem 5), and calls a stored
/// procedure (Problem 6). Talks to a live database through EmployeeDAO.Context.
/// Mutating DAO calls are left commented out so repeated runs do not alter data.
/// </summary>
static void Main()
{
    // Problem 1. DAO Class Test
    Employee employee = EmployeeDAO.FindByKey(2);
    Console.WriteLine(employee.LastName);
    employee.LastName = "Costner";
    // EmployeeDAO.Modify(employee);

    // Build a brand-new employee record; the Add call stays commented out
    // so repeated runs do not keep inserting rows.
    employee = new Employee();
    employee.FirstName = "Georgi";
    employee.LastName = "Georgiev";
    employee.JobTitle = "Production Technician";
    employee.DepartmentID = 7;
    employee.ManagerID = 16;
    employee.Salary = 2000;
    employee.AddressID = 166;
    employee.HireDate = DateTime.Now;
    // EmployeeDAO.Add(employee);

    employee = EmployeeDAO.FindByKey(4);
    // EmployeeDAO.Delete(employee);

    // Problem 3. Database Search Queries

    // 1. Find all employees who have projects started in the time period
    // 2001 - 2003 (inclusive).
    // Select the project's name, start date, end date and manager name.
    var projects = EmployeeDAO.Context.Projects
        .Where(p => p.StartDate.Year >= 2001 && p.StartDate.Year <= 2003)
        .Select(p => new
        {
            ProjectName = p.Name,
            StartDate = p.StartDate,
            EndDate = p.EndDate
            // There is no project manager in the database
        });
    Console.WriteLine();
    Console.WriteLine("Projects started between 2001 and 2003");
    Console.WriteLine();
    foreach (var project in projects)
    {
        Console.WriteLine(project.ProjectName + " " + project.StartDate + " " + project.EndDate);
    }

    // 2. Find all addresses, ordered by the number of employees
    // who live there (descending), then by town name (ascending).
    // Take only the first 10 addresses and select their address text,
    // town name and employee count.
    // NOTE(review): the comment asks for a secondary ascending town-name
    // ordering, but the query below orders only by employee count — confirm.
    var addresses = EmployeeDAO.Context.Addresses
        .OrderByDescending(a => a.Employees.Count)
        .Select(a => new
        {
            AddressText = a.AddressText,
            TownName = a.Town.Name,
            EmployeeCount = a.Employees.Count
        }).Take(10);
    foreach (var address in addresses)
    {
        Console.WriteLine("{0} - {1} - {2} employees", address.AddressText, address.TownName, address.EmployeeCount);
    }

    // 3. Get an employee by id (e.g. 147).
    // Select only his/her first name, last name,
    // job title and projects (only their names).
    // The projects should be ordered by name (ascending).
    // NOTE(review): the projection below selects only JobTitle and Projects,
    // not the first/last names mentioned above — confirm intent.
    var employeesSelected = EmployeeDAO.Context.Employees
        .Where(e => e.EmployeeID == 147)
        .Select(e => new
        {
            JobTitle = e.JobTitle,
            Projects = e.Projects.OrderBy(p => p.Name).Select(p => p.Name)
        });
    Console.WriteLine();
    Console.WriteLine("Employee with projects");
    Console.WriteLine();
    foreach (var employeeSelected in employeesSelected)
    {
        Console.WriteLine(" --- " + employeeSelected.JobTitle);
        foreach (var projectName in employeeSelected.Projects)
        {
            Console.WriteLine(projectName);
        }
    }

    // 4. Find all departments with more than 5 employees.
    // Order them by employee count (ascending).
    // Select the department name, manager name and first name,
    // last name, hire date and job title of every employee.
    var departments = EmployeeDAO.Context.Departments
        .Where(d => d.Employees.Count > 5)
        .OrderBy(d => d.Employees.Count)
        .Select(d => new
        {
            DepartmentName = d.Name,
            // Manager is looked up by ID via a correlated sub-query.
            ManagerName = EmployeeDAO.Context.Employees
                .Where(e => e.EmployeeID == d.ManagerID)
                .Select(e => e.LastName).FirstOrDefault(),
            Employees = d.Employees.Select(e => new
            {
                FirstName = e.FirstName,
                LastName = e.LastName,
                HireDate = e.HireDate,
                JobTitle = e.JobTitle
            })
        });
    Console.WriteLine();
    Console.WriteLine("Departments with more than 5 employees with their employees");
    Console.WriteLine();
    foreach (var department in departments)
    {
        Console.WriteLine(" ------------- {0}, Manager: {1}", department.DepartmentName, department.ManagerName);
        foreach (var departmentEmployee in department.Employees)
        {
            Console.WriteLine(" - {0} {1} - {2} - HireDate: {3}", departmentEmployee.FirstName, departmentEmployee.LastName, departmentEmployee.JobTitle, departmentEmployee.HireDate.ToString("dd/MM/yyyy"));
        }
        Console.WriteLine();
    }

    // Problem 4. Native SQL Query
    // Find all employees who have projects with start date in the year 2002.
    // Select only their first name.
    // Solve this task by using both LINQ query and native SQL query through the context.
    Stopwatch sw = new Stopwatch();
    sw.Start();
    var linqResults = EmployeeDAO.Context.Employees
        .Where(e => e.Projects.Any(p => p.StartDate.Year == 2002))
        .Select(e => e.FirstName);
    // The query is deferred: it actually executes inside string.Join below,
    // so the measured time does include the database round trip.
    Console.WriteLine(string.Join(", ", linqResults));
    Console.WriteLine();
    Console.WriteLine("LINQ Query time: {0}", sw.Elapsed);
    Console.WriteLine();
    sw.Restart();
    var sqlQueryResults = EmployeeDAO.Context
        .Database
        .SqlQuery<string>("select e.FirstName " +
                          "from Employees e " +
                          "join EmployeesProjects ep on ep.EmployeeID = e.EmployeeID " +
                          "join Projects p on ep.ProjectID = p.ProjectID " +
                          "where DATEPART(YEAR, p.StartDate) = 2002");
    Console.WriteLine(string.Join(", ", sqlQueryResults));
    Console.WriteLine();
    Console.WriteLine("SQL Query time: {0}", sw.Elapsed);
    Console.WriteLine();
    sw.Stop();

    // Problem 5. Concurrent Database Changes with EF
    // Two independent contexts load the same row and both save a change.
    var ctx1 = new SoftUniEntities();
    var ctx2 = new SoftUniEntities();
    var employeeToUpdate1 = ctx1.Employees.Find(7);
    var employeeToUpdate2 = ctx2.Employees.Find(7);
    employeeToUpdate1.FirstName = "Change 1";
    employeeToUpdate2.FirstName = "Change 2";
    ctx1.SaveChanges();
    ctx2.SaveChanges();
    // Without concurrency fixed - the last change is submitted
    // in the other case - the first one is submitted

    // Problem 6. Call a Stored Procedure
    Console.WriteLine();
    Console.WriteLine("Stored Procedure result:");
    Console.WriteLine();
    var result = EmployeeDAO.GetProjectsByEmployee(employeeToUpdate1);
    foreach (var project in result)
    {
        // EndDate is nullable in the schema, hence the "(NULL)" fallback.
        Console.WriteLine("{0} - {1} - {2}", project.Name, project.StartDate.ToString("dd/MM/yyyy"), project.EndDate != null ? project.EndDate.ToString() : "(NULL)");
    }
}
/// <summary> /// Drinks a dose of stamina potion. /// Assumes that a stamina potion is already in its designated inventory slot. /// </summary> protected void DrinkStaminaPotion() { Inventory.ClickInventory(InventorySlotStaminaPotion); StaminaTimer.Restart(); }
/// <summary>
/// Benchmarks hand-written query-string parsing (TryParse) against the
/// expression-tree based AutoCopy mapper over the same HttpQueryCollection,
/// printing elapsed milliseconds for each approach.
/// </summary>
static void Main(string[] args)
{
    // Initialize the NameValueCollection-style query collection from a raw
    // URL query string carrying typical ad-request fields.
    string surl = "id=10010&width=10&height=10&ua=ua&ip=127.0.0.1&imei=00000000000000&android_id=A00000000000000&make=1111111111&model=XXX&os=android&osv=4.0.1&connectionType=1&deviceType=1&mac=0.0.0.0.0.0.0&screenWidth=100&screenHeight=100&appName=test&ppi=600&dpidsha1=dpidsha1&plmn=1&orientation=1&pos=1&instl=true&ver=1.0.0&bundle=bundle";
    HttpQueryCollection collection = new HttpQueryCollection(surl, false);

    // Initialize AutoCopy: an inner Ext mapping feeds the nested `ext`
    // member of the outer Data mapping; both read from the collection.
    var ac = AutoCopy.CreateMap<Ext, NameValueCollection>();
    ac.Provider = new HttpRequestParamsExpressionProvider(typeof(NameValueCollection));
    var autoCopy = AutoCopy.CreateMap<Data, NameValueCollection>();
    autoCopy.ForMember(p => p.ext, opt => opt.MapFrom(p => ac.Map(p)));
    autoCopy.Provider = new HttpRequestParamsExpressionProvider(typeof(NameValueCollection));
    autoCopy.Register();

    // Benchmark start: full iteration count only in release builds so that
    // debug runs finish instantly.
    Stopwatch sw = new Stopwatch();
#if !DEBUG
    int loop = 1000000;
#else
    int loop = 10;
#endif
    sw.Start();
    for (int i = 0; i < loop; i++)
    {
        Data data;
        TryParse(collection, out data);
    }
    sw.Stop();
    Console.WriteLine("手写解析方法循环" + loop + "次耗时" + sw.ElapsedMilliseconds + "毫秒");

    sw.Restart();
    for (int i = 0; i < loop; i++)
    {
        Data data;
        data = autoCopy.Map(collection);
    }
    sw.Stop();
    Console.WriteLine("自动解析方法循环" + loop + "次耗时" + sw.ElapsedMilliseconds + "毫秒 fastmode:" + autoCopy.IsFastMode);

    // ----- earlier Customer/CustomerInfo copy experiments, kept commented out -----
    //    var autoCopy = AutoCopy.CreateMap<CustomerInfo, Customer>();
    //    autoCopy
    //        .ForMember(p => p.zipCode, opt => opt.MapFrom(p => p.Address.ZipCode))
    //        .ForMember(p => p.PhoneNumber, opt => opt.MapFrom(p => p.Phone.Number));
    //    autoCopy.Register();
    //#if !DEBUG
    //    int loop = 10000000;
    //#else
    //    int loop = 10;
    //#endif
    //    Stopwatch sw = new Stopwatch();
    //    sw.Restart();
    //    for (int i = 0; i < loop; i++)
    //    {
    //        Customer customer = new Customer();
    //        CustomerInfo customerInfo = new CustomerInfo();
    //        customer.Address = new Address { ZipCode = "1234567890" };
    //        customer.Phone = new Telephone { Number = "17791704580" };
    //        customer.Memo = "测试默认拷贝";
    //        ShallowCopy(customerInfo, customer);
    //    }
    //    sw.Stop();
    //    Console.WriteLine(sw.ElapsedMilliseconds);
    //    sw.Restart();
    //    for (int i = 0; i < loop; i++)
    //    {
    //        Customer customer = new Customer();
    //        CustomerInfo customerInfo = new CustomerInfo();
    //        customer.Address = new Address { ZipCode = "1234567890" };
    //        customer.Phone = new Telephone { Number = "17791704580" };
    //        customer.Memo = "测试默认拷贝";
    //        autoCopy.ShallowCopy(customerInfo, customer);
    //    }
    //    sw.Stop();
    //    Console.WriteLine(sw.ElapsedMilliseconds);
    //    Customer c = new Customer();
    //    c.Address = new Address { ZipCode = "1234567890" };
    //    c.Phone = new Telephone { Number = "17791704580" };
    //    c.Memo = "测试默认拷贝";
    //    var ci = autoCopy.Map(c);
    //    var propertyInfo = PropertyHelper<Customer>.GetProperty(p => p.Address.ZipCode);
    //    var parameter = Expression.Parameter(typeof(Customer), "c");
    //    var body = Expression.MakeMemberAccess(parameter, propertyInfo);
    //    var func = Expression.Lambda(body, parameter).Compile();
}
/// <summary>
/// Benchmarks the four arithmetic operations over five numeric types and
/// prints the elapsed milliseconds for each combination. Console output is
/// identical to the previous inline version; the 20 copy-pasted
/// restart/run/stop/print sequences are factored into <see cref="Measure"/>.
/// </summary>
static void Main()
{
    Stopwatch watch = new Stopwatch();

    // Addition
    Console.WriteLine("Addition:");
    Measure(watch, "Int", () => Addition.AddInt(0, 10000000, 2));
    Measure(watch, "Long", () => Addition.AddLong(0L, 10000000L, 2L));
    Measure(watch, "Float", () => Addition.AddFloat(0f, 10000000f, 2f));
    Measure(watch, "Double", () => Addition.AddDouble(0d, 10000000d, 2d));
    Measure(watch, "Decimal", () => Addition.AddDecimal(0m, 10000000m, 2m));

    // Substraction
    Console.WriteLine("Substraction:");
    Measure(watch, "Int", () => Substraction.SubstractInt(10000000, 0, 2));
    Measure(watch, "Long", () => Substraction.SubstractLong(10000000L, 0L, 2L));
    Measure(watch, "Float", () => Substraction.SubstractFloat(10000000f, 0f, 2f));
    Measure(watch, "Double", () => Substraction.SubstractDouble(10000000d, 0d, 2d));
    Measure(watch, "Decimal", () => Substraction.SubstractDecimal(10000000m, 0m, 2m));

    // Multiplication
    Console.WriteLine("Multiplication:");
    Measure(watch, "Int", () => Mutltiplication.MultiplicateInt(1, 10000000, 2));
    Measure(watch, "Long", () => Mutltiplication.MultiplicateLong(1L, 10000000L, 2L));
    Measure(watch, "Float", () => Mutltiplication.MultiplicateFloat(1f, 10000000f, 2f));
    Measure(watch, "Double", () => Mutltiplication.MultiplicateDouble(1d, 10000000d, 2d));
    Measure(watch, "Decimal", () => Mutltiplication.MultiplicateDecimal(1m, 10000000m, 2m));

    // Division
    Console.WriteLine("Division:");
    Measure(watch, "Int", () => Division.DevideInt(10000000, 0, 2));
    Measure(watch, "Long", () => Division.DevideLong(10000000L, 0L, 2L));
    Measure(watch, "Float", () => Division.DevideFloat(10000000f, 0f, 2f));
    Measure(watch, "Double", () => Division.DevideDouble(10000000d, 0d, 2d));
    Measure(watch, "Decimal", () => Division.DevideDecimal(10000000m, 0m, 2m));
}

/// <summary>
/// Times <paramref name="operation"/> with <paramref name="watch"/> and
/// prints "label: elapsed-ms", matching the original output format exactly.
/// </summary>
/// <param name="watch">Shared stopwatch; restarted before each run.</param>
/// <param name="label">Type name printed before the elapsed time.</param>
/// <param name="operation">The arithmetic workload to benchmark.</param>
private static void Measure(Stopwatch watch, string label, Action operation)
{
    watch.Restart();
    operation();
    watch.Stop();
    Console.WriteLine(label + ": " + watch.ElapsedMilliseconds);
}
public void UpdateLastActivity() { _stopwatch?.Restart(); }
private static void BeginWatch(Stopwatch sw) { sw.Restart(); }
private static void Main(string[] args) { //TODO: // Filename similarity // Partial mode ParserResult <Options> parserResult = Parser.Default.ParseArguments <Options>(args); if (!(parserResult is Parsed <Options> parsedOptions)) { return; } Options o = parsedOptions.Value; if (o.DryRun) { Console.WriteLine("Dry-run enabled. Files won't actually be deleted."); } if (o.ShowProgress) { Console.WriteLine("Looking for files..."); } Stopwatch sw = null; TimeSpan filesElapsed = TimeSpan.Zero; if (o.ShowTiming) { sw = Stopwatch.StartNew(); } List <FileDetails> files = GetFiles(o.Directory, o.RecurseSubdirectories, o.EnableTimestamp, new FileFilter(o.MinSize, o.MaxSize, o.SkipHidden)).ToList(); if (sw != null) { filesElapsed = sw.Elapsed; } //Group files by size. This gives us groups with each more than 2 candidates. List <List <FileDetails> > sizeGroups = GroupBySize(files).ToList(); QueueStatus status = new QueueStatus(sizeGroups.Sum(x => x.Count), sizeGroups.Sum(x => x.Sum(y => y.Size))); if (o.ShowProgress) { status.PrintStatus(status); } sw?.Restart(); if (!o.DisableHash) { HashFiles(sizeGroups, status, o.ShowProgress, o.UseFastHash); } else { status.BytesProgress = status.BytesTotal; status.FilesProgress = status.FilesTotal; } if (o.ShowProgress) { status.PrintStatus(status); } if (sw != null) { Console.WriteLine($"It took {filesElapsed} to find files. It took {sw.Elapsed} to hash files."); } List <List <FileDetails> > withCriteria = GroupByCriteria(sizeGroups).ToList(); if (o.NoAsk) { DeleteAllButFirst(withCriteria, o.DryRun); } else { HandleDupes(withCriteria, o.DryRun); } }
public void Start() { _watcher?.Restart(); StartInternal(); }