Example #1
private void CheckEof()
 {
     if (_eofs.All(v => v.Value))
     {
         SendEof();
     }
 }
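A minimal, self-contained sketch of the pattern above, assuming a per-source EOF flag map. The _eofs field, RegisterSource, and SendEof shown here are illustrative assumptions, not the original class:

using System;
using System.Collections.Concurrent;
using System.Linq;

class EofMerger
{
    private readonly ConcurrentDictionary<int, bool> _eofs = new ConcurrentDictionary<int, bool>();

    // Each upstream source is registered once with its EOF flag cleared.
    public void RegisterSource(int sourceId) => _eofs.TryAdd(sourceId, false);

    // A source flips its flag at end-of-stream; the merged EOF is forwarded
    // only once every registered source has finished.
    public void MarkEof(int sourceId)
    {
        _eofs[sourceId] = true;
        if (_eofs.All(kvp => kvp.Value))
        {
            SendEof();
        }
    }

    private void SendEof() => Console.WriteLine("All sources reached EOF.");
}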
Example #2
        public async Task <bool> EndTuningAsync(string clientId)
        {
            try
            {
                if (!_clientChannels.TryRemove(clientId, out ChannelInfo channel))
                {
                    return(false);
                }

                if (channel != null && _clientChannels.All(c => c.Value?.ChannelId != channel.ChannelId))
                {
                    ITimeshiftControlEx timeshiftControl = ServiceRegistration.Get <ITvProvider>() as ITimeshiftControlEx;
                    if (!(await timeshiftControl.StopTimeshiftAsync(TV_USER_NAME, channel.SlotIndex).ConfigureAwait(false)))
                    {
                        _logger.Error("SlimTvHandler: Couldn't stop timeshifting for channel {0}", channel.ChannelId);
                        return(false);
                    }
                }

                return(true);
            }
            catch (Exception ex)
            {
                _logger.Error("SlimTvHandler: Error ending tuning for client {0}", ex, clientId);
            }
            return(false);
        }
Example #3
    public async Task ItWorks(int messageCount, int consumers)
    {
        var tempDirectory = NewTempDirectory();

        var client = Configure.With(Using(new BuiltinHandlerActivator()))
                     .Logging(l => l.Console(LogLevel.Info))
                     .Transport(t => t.UseFileSystemAsOneWayClient(tempDirectory))
                     .Routing(r => r.TypeBased().Map <string>("consumer"))
                     .Start();

        var messages      = new HashSet <string>(Enumerable.Range(0, messageCount).Select(o => $"THIS IS MESSAGE {o}"));
        var messageCounts = new ConcurrentDictionary <string, int>();

        consumers.Times(() => StartConsumer(messageCounts, tempDirectory));

        foreach (var message in messages)
        {
            await client.Send(message);
        }

        await messageCounts.WaitUntil(d => d.Count >= messageCount, timeoutSeconds : 10);

        // wait 1 extra second for unexpected messages to arrive...
        await Task.Delay(TimeSpan.FromSeconds(1));

        Assert.That(messages.OrderBy(m => m), Is.EqualTo(messageCounts.Keys.OrderBy(m => m)));
        Assert.That(messageCounts.All(c => c.Value == 1), Is.True, $@"Not all message counts were exactly 1:

{string.Join(Environment.NewLine, messageCounts.Where(kvp => kvp.Value > 1).Select(kvp => $"    {kvp.Key}: {kvp.Value}"))}
");
    }
Example #4
 public static void RefreshAll()
 {
     Instances.All(i => {
         i.Value.Refresh();
         return(true);
     });
 }
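Here All is used purely for its side effect, with return(true) preventing the enumeration from short-circuiting. A plain foreach states the intent more directly; an equivalent rewrite, assuming Instances values expose Refresh():

public static void RefreshAll()
{
    // foreach makes the iteration explicit and avoids a predicate
    // whose boolean result is discarded.
    foreach (var instance in Instances)
    {
        instance.Value.Refresh();
    }
}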
Example #5
        /// <summary>
        /// Adds a Reference.
        /// </summary>
        public static void Add(int Handle, int SpecificHandle, object proc)
        {
#if !__IOS__
            if (proc == null)
            {
                return;
            }

            if (proc.Equals(Freeproc))
            {
                return;
            }

            var key = Tuple.Create(Handle, SpecificHandle);

            var contains = Procedures.ContainsKey(key);

            if (Freeproc != null && Procedures.All(pair => pair.Key.Item1 != Handle))
            {
                Bass.ChannelSetSync(Handle, SyncFlags.Free, 0, Freeproc);
            }

            if (contains)
            {
                Procedures[key] = proc;
            }
            else
            {
                Procedures.TryAdd(key, proc);
            }
#endif
        }
Example #6
 public void Push(GyroQuaternion quat)
 {
     if (_calibrationDictionary.All(x => x.Key != quat.sensorName))
     {
         _calibrationDictionary.TryAdd(quat.sensorName, new GyroQuaternion(quat.sensorName, quat.qX, quat.qY, quat.qZ, quat.qW));
     }
 }
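The All check re-implements a "key not present" test and is not atomic with the TryAdd that follows; TryAdd alone already inserts only when the key is absent. An equivalent, race-free sketch:

 public void Push(GyroQuaternion quat)
 {
     // TryAdd is a no-op (returns false) when the key already exists,
     // so the separate existence check is unnecessary.
     _calibrationDictionary.TryAdd(
         quat.sensorName,
         new GyroQuaternion(quat.sensorName, quat.qX, quat.qY, quat.qZ, quat.qW));
 }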
Example #7
        public Container ClearContextId()
        {
            _builders.All(x => x.Value.Clean(_contextId.Value));
            _contextId.Value = new Guid();

            return(this);
        }
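Note that All short-circuits on the first predicate that returns false, so if Clean can return false some builders would never be cleaned. If every builder must be visited, a foreach is safer; a sketch under that assumption:

        public Container ClearContextId()
        {
            // Visit every builder unconditionally; All() would stop at the
            // first Clean() call that returned false.
            foreach (var builder in _builders)
            {
                builder.Value.Clean(_contextId.Value);
            }
            _contextId.Value = new Guid();
            return this;
        }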
Example #8
        public Dictionary <string, decimal> Solve(int n = 4, decimal d = 0.85M)
        {
            bool converged = false;
            Dictionary <string, ISet <string> >    pointingDict = GetPointingDict(Graph);
            ConcurrentDictionary <string, decimal> pageRankDict = new ConcurrentDictionary <string, decimal>();
            var nodeArray = Graph.Keys.ToArray();

            foreach (string key in Graph.Keys)
            {
                pageRankDict[key] = 1.0M / Graph.Count;
            }

            ConcurrentDictionary <string, decimal> pageRankDictLast = pageRankDict;

            while (!converged)
            {
                pageRankDictLast = pageRankDict;
                pageRankDict     = new ConcurrentDictionary <string, decimal>(pageRankDictLast);

                Parallel.For(0, n, i =>
                {
                    int range      = Graph.Count / n;
                    int startIndex = i * range;
                    int endIndex   = (i + 1) < n ? (i + 1) * range : Graph.Count;

                    for (int j = startIndex; j < endIndex; ++j)
                    {
                        string node         = nodeArray[j];
                        decimal newPageRank = pointingDict[node].Aggregate(
                            0.0M,
                            (accumulator, pointingNode) => accumulator + pageRankDictLast[pointingNode] / Graph[pointingNode].Count
                            );

                        newPageRank        = (1.0M - d) + d * newPageRank;
                        pageRankDict[node] = newPageRank;
                    }
                });

                converged = pageRankDict.All((entry) => entry.Value == pageRankDictLast[entry.Key]);
            }

            decimal sum = pageRankDict.Values.Sum();
            Dictionary <string, decimal> result = pageRankDict.ToDictionary(entry => entry.Key, entry => entry.Value / sum);

            return(result);
        }
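The convergence test above requires exact decimal equality between iterations, which can keep the loop running far longer than necessary. A tolerance-based check is the usual alternative; a sketch, with the epsilon value being an assumption:

                // Converge once no node's rank moved by more than a small tolerance.
                const decimal Epsilon = 0.000000000001M;
                converged = pageRankDict.All(entry =>
                    Math.Abs(entry.Value - pageRankDictLast[entry.Key]) <= Epsilon);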
Example #9
        public void AddToParty(Guid guid, string username)
        {
            if (_knownPartyEntities.All(x => x.Key != guid))
            {
                _knownPartyEntities.TryAdd(guid, username);
            }

            SetPartyMemberUi();
        }
Example #10
        private void TryCancel()
        {
            bool shouldCancel = Clients.All(pair => !pair.Value.IsConnected);

            if (shouldCancel)
            {
                _cancellationTokenSource.Cancel();
            }
        }
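The predicate reads as a double negative; an equivalent form with Any can be clearer. Both variants treat an empty dictionary as "cancel", since All is vacuously true and Any is vacuously false:

            // "No client is connected" expressed without the inner negation.
            bool shouldCancel = !Clients.Any(pair => pair.Value.IsConnected);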
Example #11
 public void DeleteAllUnknownFiles()
 {
     if (_fileInfos.All(fi => fi.Value.FileType != KVFileType.Unknown))
     {
         return;
     }
     foreach (var fileId in _fileInfos.Where(fi => fi.Value.FileType == KVFileType.Unknown).Select(fi => fi.Key).ToArray())
     {
         _fileCollection.GetFile(fileId).Remove();
         _fileInfos.TryRemove(fileId);
     }
 }
Example #12
        public async Task SendAndReceiveManyMessages(int messageCount)
        {
            var allMessagesReceived = new ManualResetEvent(false);
            var idCounts            = new ConcurrentDictionary <int, int>();
            var sentMessages        = 0;
            var receivedMessages    = 0;
            var stopWatch           = new Stopwatch();
            var inputQueueAddress   = TestConfig.GetName("input1");

            var bus1 = _busFactory.GetBus <MessageWithId>(inputQueueAddress, async msg =>
            {
                idCounts.AddOrUpdate(msg.Id, 1, (id, old) => old + 1);

                Interlocked.Increment(ref receivedMessages);

                if (receivedMessages >= messageCount)
                {
                    stopWatch.Stop();
                    Console.WriteLine("DONE: took time:" + stopWatch.ElapsedMilliseconds + "ms");
                    allMessagesReceived.Set();
                }
            });

            var messagesToSend = Enumerable.Range(0, messageCount)
                                 .Select(id => new MessageWithId(id))
                                 .ToList();

            using (var printTimer = new Timer(5000))
            {
                printTimer.Elapsed += delegate { Console.WriteLine("Sent: {0}, Received: {1}", sentMessages, receivedMessages); };
                printTimer.Start();
                stopWatch.Start();
                Console.WriteLine("Sending {0} messages", messageCount);
                await Task.WhenAll(messagesToSend.Select(async msg =>
                {
                    await bus1.SendLocal(msg);
                    Interlocked.Increment(ref sentMessages);
                }));

                var timeout = TimeSpan.FromSeconds(messageCount * 0.01 + 100);
                Console.WriteLine("Waiting up to {0} seconds", timeout.TotalSeconds);
                allMessagesReceived.WaitOrDie(timeout, errorMessageFactory: () => GenerateErrorText(idCounts));
            }

            Console.WriteLine("Waiting one more second in case messages are still dripping in...");
            await Task.Delay(1000);

            var errorText = GenerateErrorText(idCounts);

            Assert.That(idCounts.Count, Is.EqualTo(messageCount), errorText);
            Assert.That(idCounts.All(c => c.Value == 1), errorText);
        }
Example #13
        /// <summary>
        /// Stop monitoring of events.
        /// </summary>
        /// <returns></returns>
        public async Task <StopResult> StopAsync()
        {
            if (_observedTimestamps == null)
            {
                return(new StopResult());
            }

            Interlocked.Exchange(ref _shuttingDown, 1);

            var endTime = DateTime.UtcNow;
            // allow some extra time so that messages related to the final timestamps can still arrive
            await Task.Delay(TimeSpan.FromSeconds(5));

            // check one last time whether all messages related to the timestamps have been received
            CheckForMissingTimestamps();
            bool allInExpectedInterval = _observedTimestamps.IsEmpty;

            _logger.LogInformation("Number of incomplete timestamps while stopping: {IncompleteTimestamps}", _observedTimestamps.Count);

            if (_cancellationTokenSource != null)
            {
                _cancellationTokenSource.Cancel();
                _cancellationTokenSource = null;
            }

            // the stop procedure takes about a minute, so we fire and forget.
            StopEventProcessorClientAsync().SafeFireAndForget(e => _logger.LogError(e, "Error while stopping event monitoring."));

            bool allExpectedValueChanges = true;

            if (_currentConfiguration.ExpectedValueChangesPerTimestamp > 0)
            {
                // TODO collect "expected" parameter as groups related to OPC UA nodes
                allExpectedValueChanges = _valueChangesPerNodeId?.All(kvp =>
                                                                      (kvp.Value / _totalValueChangesCount) == _currentConfiguration
                                                                      .ExpectedValueChangesPerTimestamp) ??
                                          false;
                _logger.LogInformation("All expected value changes received: {AllExpectedValueChanges}",
                                       allExpectedValueChanges);
            }

            return(new StopResult()
            {
                ValueChangesByNodeId = new ReadOnlyDictionary <string, int>(_valueChangesPerNodeId ?? new ConcurrentDictionary <string, int>()),
                AllExpectedValueChanges = allExpectedValueChanges,
                TotalValueChangesCount = _totalValueChangesCount,
                AllInExpectedInterval = allInExpectedInterval,
                StartTime = _startTime,
                EndTime = endTime,
            });
        }
Example #14
        public async Task <HealthCheckResult> CheckHealthAsync(CancellationToken cancellationToken)
        {
            NuGetDownloadResult nuGetDownloadResult = await _nuGetDownloadClient.DownloadNuGetAsync(
                NuGetDownloadSettings.Default,
                _logger,
                _httpClient.CreateClient("NuGet"),
                cancellationToken);

            if (!nuGetDownloadResult.Succeeded)
            {
                return(new HealthCheckResult(false));
            }

            var args = new List <string>
            {
                "Sources",
                "-Format",
                "Short"
            };

            var lines = new List <string>();

            ExitCode exitCode = await ProcessRunner.ExecuteProcessAsync(nuGetDownloadResult.NuGetExePath,
                                                                        args,
                                                                        (message, _) =>
            {
                lines.Add(message);
                _logger.Verbose("{Message}", message);
            },
                                                                        (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                        (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                        debugAction : (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                        cancellationToken : cancellationToken);

            if (!exitCode.IsSuccess)
            {
                return(new HealthCheckResult(false));
            }

            ConcurrentDictionary <Uri, bool?> nugetFeeds = GetFeedUrls(lines);

            List <Task> tasks = nugetFeeds.Keys
                                .Select(nugetFeed => CheckFeedAsync(cancellationToken, nugetFeed, nugetFeeds))
                                .ToList();

            await Task.WhenAll(tasks);

            bool allSucceeded = nugetFeeds.All(pair => pair.Value == true);

            return(new HealthCheckResult(allSucceeded));
        }
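Because the dictionary values are bool?, the comparison pair.Value == true treats null (a feed that was never evaluated) the same as false, which is the conservative choice for a health check. An equivalent explicit form:

            // null (unchecked) and false (unhealthy) both count as failures.
            bool allSucceeded = nugetFeeds.All(pair => pair.Value.GetValueOrDefault());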
Example #15
        public void ProfilingMD_Ex2()
        {
            using (var c = Create())
            {
                ConnectionMultiplexer conn = c;
                var profiler = new ToyProfiler();

                conn.RegisterProfiler(profiler);

                var threads = new List <Thread>();

                var perThreadTimings = new ConcurrentDictionary <Thread, List <IProfiledCommand> >();

                for (var i = 0; i < 16; i++)
                {
                    var db = conn.GetDatabase(i);

                    var thread =
                        new Thread(
                            delegate()
                    {
                        var threadTasks = new List <Task>();

                        conn.BeginProfiling(Thread.CurrentThread);

                        for (var j = 0; j < 1000; j++)
                        {
                            var task = db.StringSetAsync("" + j, "" + j);
                            threadTasks.Add(task);
                        }

                        Task.WaitAll(threadTasks.ToArray());

                        perThreadTimings[Thread.CurrentThread] = conn.FinishProfiling(Thread.CurrentThread).ToList();
                    }
                            );

                    profiler.Contexts[thread] = thread;

                    threads.Add(thread);
                }

                threads.ForEach(thread => thread.Start());
                threads.ForEach(thread => thread.Join());

                Assert.AreEqual(16, perThreadTimings.Count);
                Assert.IsTrue(perThreadTimings.All(kv => kv.Value.Count == 1000));
            }
        }
Example #16
        private void OnMatch(SearchLib.Match match, int matchCount)
        {
            var fUpdateWordCount = new Action <Match>((m) =>
            {
                if (!lbTargets.Items.Cast <TargetWord>().Any(w => w.Word.ToLower() == m.Word.ToLower()))
                {
                    var targetWord = new TargetWord(m.Word, 1);
                    lbTargets.Items.Add(targetWord);
                }
                else
                {
                    for (int i = 0; i < lbTargets.Items.Count; i++)
                    {
                        var targetWord = lbTargets.Items[i] as TargetWord;
                        if (targetWord.Word.ToLower() == m.Word.ToLower())
                        {
                            targetWord.MatchCount++;
                            lbTargets.Items[i] = targetWord;
                            break;
                        }
                    }
                }
            });

            if (this.InvokeRequired)
            {
                this.Invoke(fUpdateWordCount, match);
            }
            else
            {
                fUpdateWordCount(match);
            }

            // If we have other matches for this file, then append to that set
            if (_matchResults.ContainsKey(match.File))
            {
                // Get matches for file
                var fileMatches = _matchResults[match.File];
                // Append to it
                fileMatches.Matches.Add(match);

                // If we're at the last file and every file's matches have all been recorded, we're done
                if (_lastFile && _matchResults.All(p => p.Value.MatchCount == p.Value.Matches.Count))
                {
                    OnComplete();
                }
            }
            AddMatchToResults(match, matchCount, true);
        }
Example #17
        public async void LogoutAsync()
        {
            var callback = OperationContext.Current.GetCallbackChannel <IClientCallback>();

            if (_clients.All(c => c.Value.Callback != callback))
            {
                return;
            }

            var taskFactory = new TaskFactory();
            await taskFactory.StartNew(() =>
            {
                var clientInfo = _clients.First(c => c.Value.Callback == callback);

                ClientConnection clientConnection;
                if (_clients.TryRemove(clientInfo.Value.User.Login, out clientConnection))
                {
                    foreach (var conn in _clients.Where(c => c.Value != clientConnection))
                    {
                        conn.Value.Callback.UserLoggedOut(clientConnection.User);
                    }
                }
            });
        }
Example #18
        private Task ShardReadyAsync(ShardReadyEventArgs e)
        {
            _shardsReady.AddOrUpdate(e.SessionId, true, (shardKey, value) => true);
            if (_shardsReady.Count == RiasBot.Shards.Count && _shardsReady.All(x => x.Value))
            {
                RiasBot.ShardReady -= ShardReadyAsync;
                Log.Information("All shards are connected");

                RiasBot.GetRequiredService <MuteService>();

                var reactionsService = RiasBot.GetRequiredService <ReactionsService>();
                reactionsService.WeebUserAgent = $"{RiasBot.CurrentUser.Name}/{Rias.Version}";
                reactionsService.AddWeebUserAgent();
            }

            return(Task.CompletedTask);
        }
Example #19
        public Task Run()
        {
            return(Task.Factory.StartNew(() => {
                Console.WriteLine("Switch started");

                while (!buffer.IsCompleted)
                {
                    if (buffer.TryTake(out var msg, 500))
                    {
                        if (Computers.TryGetValue(msg.Address, out var computer))
                        {
                            //Console.WriteLine($"From {msg.From} : {msg.Address} - {msg.X} - {msg.Y}");
                            computer.Receive(msg);
                        }
                        else
                        {
                            if (NATMessage == null)
                            {
                                Console.WriteLine($"From {msg.From} : First NAT Message {msg.Address} - {msg.X} - {msg.Y}");
                            }
                            NATMessage = msg;
                            //Console.WriteLine($"From {msg.From} : NAT Message {msg.Address} - {msg.X} - {msg.Y}");
                        }
                    }
                    else
                    {
                        if (Computers.All(x => x.Value.Idle))
                        {
                            //Console.WriteLine("All computers IDLE");
                            if (PreviousNATMessage != null)
                            {
                                if (NATMessage.Y == PreviousNATMessage.Y)
                                {
                                    Console.WriteLine($"Message twice delivered in a row : {NATMessage.Y}");
                                    buffer.CompleteAdding();
                                }
                            }

                            //Console.WriteLine($"Sending NAT : {NATMessage.Address} - {NATMessage.X} - {NATMessage.Y}");
                            Computers[0].Receive(NATMessage);
                            PreviousNATMessage = NATMessage;
                        }
                    }
                }
            }));
        }
Example #20
        public async Task <HealthCheckResult> CheckHealthAsync(CancellationToken cancellationToken)
        {
            if (string.IsNullOrWhiteSpace(_nuGetConfiguration.NugetExePath) ||
                !File.Exists(_nuGetConfiguration.NugetExePath))
            {
                _logger.Warning("Could not perform health checks of NuGet feeds, nuget.exe is missing");
                return(new HealthCheckResult(false));
            }

            var args = new List <string> {
                "Sources", "-Format", "Short"
            };

            var lines = new List <string>();

            var exitCode = await ProcessRunner.ExecuteProcessAsync(_nuGetConfiguration.NugetExePath,
                                                                   args,
                                                                   (message, _) =>
            {
                lines.Add(message);
                _logger.Verbose("{Message}", message);
            },
                                                                   (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                   (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                   debugAction : (message, category) => _logger.Verbose("{Category}{Message}", category, message),
                                                                   cancellationToken : cancellationToken);

            if (!exitCode.IsSuccess)
            {
                return(new HealthCheckResult(false));
            }

            ConcurrentDictionary <Uri, bool?> nugetFeeds = GetFeedUrls(lines);

            var tasks = nugetFeeds.Keys
                        .Select(nugetFeed => CheckFeedAsync(nugetFeed, nugetFeeds, cancellationToken))
                        .ToList();

            await Task.WhenAll(tasks);

            bool allSucceeded = nugetFeeds.All(pair => pair.Value == true);

            return(new HealthCheckResult(allSucceeded));
        }
Example #21
        public static IEnumerable <DeviceErr> AddClient(IEnumerable <DeviceInfo> devicesList)
        {
            var res = new List <DeviceErr>();

            foreach (var device in devicesList)
            {
                var deviceId = device.DeviceId;
                if (_clients.ContainsKey(deviceId))
                {
                    res.Add(new DeviceErr(deviceId, Error.DeviceIsExist));
                }
            }
            foreach (var deviceInfo in devicesList.Where(x => _clients.All(y => y.Key != x.DeviceId)))
            {
                var deviceId = deviceInfo.DeviceId;
                res.Add(new DeviceErr(deviceId, AddClient(deviceInfo) ? Error.Success : Error.DeviceIsExist));
            }

            return(res);
        }
Example #22
        public void MoveCenters()
        {
            var isOptimizedMap = new ConcurrentDictionary <int, bool>();

            Enumerable.Range(0, Clusters).AsParallel().ForAll(i =>
            {
                var clusterTotal = ClusterMap.Count(c => c.Value == i);

                if (clusterTotal > 0)
                {
                    DenseVector vectorSum = null;

                    for (var m = 0; m < LSA.MatrixContainer.VMatrix.ColumnCount; m++)
                    {
                        if (ClusterMap[m] == i)
                        {
                            if (vectorSum == null)
                            {
                                vectorSum = (DenseVector)LSA.MatrixContainer.VMatrix.Column(m);
                            }
                            else
                            {
                                vectorSum += (DenseVector)LSA.MatrixContainer.VMatrix.Column(m);
                            }
                        }
                    }

                    var newCenter = (vectorSum / clusterTotal).ToArray();

                    if (Centers[i] != null)
                    {
                        isOptimizedMap[i] = Distance.Cosine(Centers[i], newCenter) < OptimizationVarianceThreshold;
                    }

                    Centers[i] = newCenter;
                }
            });

            IsOptimized = isOptimizedMap.All(v => v.Value == true);
        }
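All() over an empty dictionary returns true, and isOptimizedMap is only populated for clusters whose center already existed, so on the first pass IsOptimized could be set to true vacuously. Guarding on the expected entry count avoids that; a sketch:

            // Require a verdict for every cluster before declaring convergence.
            IsOptimized = isOptimizedMap.Count == Clusters && isOptimizedMap.All(v => v.Value);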
Example #23
        public int Run(EventLog eventLog)
        {
            eventLogInstance = eventLog;

            ConcurrentDictionary <string, bool> runningThreads = new ConcurrentDictionary <string, bool>();

            DllManager dllManager = new DllManager();

            Thread helloThread = new Thread(() => CheckForNewDll(runningThreads));

            helloThread.IsBackground = true;
            helloThread.Start();


            Thread exportThread = new Thread(() => DoingSomething("export 11", runningThreads));

            exportThread.IsBackground = true;
            exportThread.Start();


            Thread importThread = new Thread(() => DoingSomething("import 11", runningThreads));

            importThread.IsBackground = true;
            importThread.Start();


            while (true)
            {
                Thread.Sleep(5000);

                bool areAllThreadsStopped = runningThreads.All(x => x.Value == false);

                if (areAllThreadsStopped)
                {
                    LogUtil.Log("***** ALL THREADS HAVE BEEN STOPPED *****");
                    return(1);
                }
            }
        }
Example #24
        protected virtual void Dispose(bool disposing)
        {
            if (disposed)
            {
                return;
            }

            if (disposing)
            {
                lock (this)
                {
                    while (!clients.All(isClientFinished))
                    {
                        Monitor.Wait(this, resendTimeout);
                    }
                }

                udpclient.Close();
            }

            disposed = true;
        }
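Monitor.Wait here rechecks the All() condition either when pulsed or when resendTimeout elapses. For prompt shutdown, whatever code marks a client as finished should pulse the same lock object; a hypothetical sketch of that counterpart (OnClientFinished, MarkFinished, and the state behind isClientFinished are assumptions):

        private void OnClientFinished(Client client)
        {
            lock (this)
            {
                // Update whatever state isClientFinished inspects, then wake
                // the Dispose loop so it can re-evaluate clients.All(...).
                client.MarkFinished();
                Monitor.PulseAll(this);
            }
        }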
Example #25
        /// <summary>
        /// Gets and updates the fill forward resolution by checking the specified subscription configurations and
        /// selecting the smallest resolution not equal to tick
        /// </summary>
        private void UpdateFillForwardResolution(FillForwardResolutionOperation operation, SubscriptionDataConfig configuration)
        {
            // Due to performance implications, let's be conservative about updating the _fillForwardResolution
            if (ValidateFillForwardResolution(configuration) &&
                (
                    (new[] { FillForwardResolutionOperation.BeforeAdd, FillForwardResolutionOperation.AfterAdd }.Contains(operation) &&
                     configuration.Increment != _fillForwardResolution.Value)    // check if the new Increment is different
                    ||
                    (operation == FillForwardResolutionOperation.AfterRemove && // We are removing
                     configuration.Increment == _fillForwardResolution.Value && // True: We are removing the resolution we were using
                     _subscriptions.All(x => x.Key.Resolution != configuration.Resolution)))   // False: there is at least another one equal, no need to update
                )
            {
                var configurations = (operation == FillForwardResolutionOperation.BeforeAdd)
                    ? _subscriptions.Keys.Concat(new[] { configuration }) : _subscriptions.Keys;

                _fillForwardResolution.Value = configurations.Where(ValidateFillForwardResolution)
                                               .Select(x => x.Resolution)
                                               .Distinct()
                                               .DefaultIfEmpty(Resolution.Minute)
                                               .Min().ToTimeSpan();
            }
        }
Example #26
        public static string checkMagazine(string[] magazine, string[] note)
        {
            // ConcurrentDictionary carries some performance overhead, but it provides utility methods (such as AddOrUpdate) that plain Dictionary lacks in the .NET Framework version available to the online compiler
            var magazineDict = new ConcurrentDictionary <string, int>();

            foreach (var str in magazine)
            {
                magazineDict.AddOrUpdate(str, 1, (x, count) => count + 1);
            }

            var noteDictionary = new ConcurrentDictionary <string, int>();

            foreach (var str in note)
            {
                noteDictionary.AddOrUpdate(str, 1, (x, count) => count + 1);
            }

            var result = noteDictionary.All(x => magazineDict.ContainsKey(x.Key) && magazineDict[x.Key] >= x.Value);

            Console.WriteLine(result ? "Yes" : "No");

            return(result ? "Yes" : "No");
        }
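A quick usage sketch for the multiset check above, with hypothetical input; every word in the note must appear in the magazine at least as many times as the note requires:

        string[] magazine = { "give", "me", "one", "grand", "today", "night" };
        string[] note     = { "give", "one", "grand", "today" };
        // Prints "Yes" (once inside the method, once here via the return value).
        Console.WriteLine(checkMagazine(magazine, note));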
Example #27
        /// <summary>
        /// Gets a connection for a new subscription or query. Reuses an existing connection when one has capacity, otherwise creates a new one.
        /// </summary>
        /// <param name="address">The address the socket is for</param>
        /// <param name="authenticated">Whether the socket should be authenticated</param>
        /// <returns></returns>
        protected virtual SocketConnection GetWebsocket(string address, bool authenticated)
        {
            var socketResult = sockets.Where(s => s.Value.Socket.Url == address && (s.Value.Authenticated == authenticated || !authenticated) && s.Value.Connected).OrderBy(s => s.Value.HandlerCount).FirstOrDefault();
            var result       = socketResult.Equals(default(KeyValuePair <int, SocketConnection>)) ? null : socketResult.Value;

            if (result != null)
            {
                if (result.HandlerCount < SocketCombineTarget || (sockets.Count >= MaxSocketConnections && sockets.All(s => s.Value.HandlerCount >= SocketCombineTarget)))
                {
                    // Use the existing socket if it has fewer handlers than the target, OR if it has the fewest handlers and we can't create a new socket
                    return(result);
                }
            }

            // Create new socket
            var socket        = CreateSocket(address);
            var socketWrapper = new SocketConnection(this, socket);

            foreach (var kvp in genericHandlers)
            {
                socketWrapper.AddHandler(kvp.Key, false, kvp.Value);
            }
            return(socketWrapper);
        }
Example #28
 public bool IsFullySynched()
 {
     return(_Summaries.All(s => s.Value.Status != null && s.Value.Status.IsFullySynched));
 }
Example #29
        public SetupGameState(
            Lobby lobby,
            int numExpectedPerUser,
            string unityTitle        = "Setup Time!",
            string unityInstructions = "",
            TimeSpan?setupDuration   = null)
            : base(lobby: lobby, exit: new WaitForUsers_StateExit(lobby))
        {
            ConcurrentDictionary <User, int> usersToNumSubmitted = new ConcurrentDictionary <User, int>();

            foreach (User user in lobby.GetAllUsers())
            {
                usersToNumSubmitted.AddOrReplace(user, 0);
            }
            StateChain setupChain = new StateChain(
                stateGenerator: (int counter) =>
            {
                if (counter < numExpectedPerUser)
                {
                    SimplePromptUserState setupUserState = new SimplePromptUserState(
                        promptGenerator: (User user) =>
                    {
                        return(CountingPromptGenerator(user, usersToNumSubmitted[user]));
                    },
                        formSubmitHandler: (User user, UserFormSubmission input) =>
                    {
                        (bool, string)handlerResponse = CountingFormSubmitHandler(user, input, usersToNumSubmitted[user]);
                        if (handlerResponse.Item1)
                        {
                            usersToNumSubmitted[user]++;
                        }
                        return(handlerResponse);
                    },
                        userTimeoutHandler: (User user, UserFormSubmission input) =>
                    {
                        return(CountingUserTimeoutHandler(user, input, usersToNumSubmitted[user]));
                    });

                    setupUserState.AddPerUserExitListener((User user) =>
                    {
                        if (usersToNumSubmitted.All(kvp => kvp.Value >= numExpectedPerUser))     // if, after this user's submission, everyone has submitted the expected amount, rush everyone through
                        {
                            this.HurryUsers();
                        }
                    });

                    return(setupUserState);
                }
                else
                {
                    return(null);
                }
            },
                stateDuration: setupDuration);

            this.Entrance.Transition(setupChain);
            setupChain.Transition(this.Exit);
            this.Legacy_UnityView = new Legacy_UnityView(lobby)
            {
                ScreenId = new StaticAccessor <TVScreenId> {
                    Value = TVScreenId.WaitForUserInputs
                },
                Title = new StaticAccessor <string> {
                    Value = unityTitle
                },
                Instructions = new StaticAccessor <string> {
                    Value = unityInstructions
                },
            };
        }
Example #30
        public BermudaResult GetData(string domain, string query, string mapreduce, string merge, string paging, int remdepth, string command, string cursor, string paging2)
        {
            var args = ParseCommand(command);
            bool noCache = args.Contains("-nocache");
            bool makeCursor = cursor == MakeCursorToken;
            bool useCursor = !makeCursor && !string.IsNullOrWhiteSpace(cursor);

            DateTime minDate = DateTime.MinValue;
            DateTime maxDate = DateTime.MaxValue;

            if (remdepth > 0)
            {
                //map
                var queryHash = cursor ?? GetQueryHash(domain, query, mapreduce, merge, paging, null);

                //reduce 
                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
#if DEBUG
                    if (CacheTraceMessageLevel < 3) Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
#endif

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }
                    else
                    {
                        return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_1" }, CacheKey = cachedDatapoints.CacheKey };
                    }
                }
                else
                {
                    if (useCursor) throw new Exception("Cursor " + cursor + " not found");
                    //var assignments = PartitionBlobs(domain, blobInterfaces, minDate, maxDate, false, true);

                    var reducers = HostEnvironment.Instance.GetAvailablePeerConnections();

                    if (!reducers.Any()) throw new Exception("Specified dataset not loaded: " + domain);

                    ConcurrentDictionary<PeerInfo, BermudaResult> results = new ConcurrentDictionary<PeerInfo, BermudaResult>();
                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    List<Task> tasks = new List<Task>();
                    foreach (var reducer in reducers)
                    {
                        Task t = new Task((peerObj) =>
                        {
                            var peerInfo = peerObj as PeerInfo;
                            var initiated = DateTime.Now;
                            var subqueryHash = GetQueryHash(domain, query, mapreduce, merge, paging, peerInfo.ToString());
                            Stopwatch sw3 = new Stopwatch();
                            sw3.Start();

                            //see if the cache contains a matching result and return it if it's not outdated
                            BermudaResult cachedDatapoints2;
                            if (!noCache && CachedData.TryGetValue(subqueryHash, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                            {
                                if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");

                                BermudaResult res = null;

                                if (useCursor) 
                                {
                                    var dataType2 = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                                    res = GetCursorData(paging2, cachedDatapoints2, dataType2);
                                }
                                else 
                                {
                                    res = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_2" } };
                                }
                                
                                results[peerInfo] = res;
                            }
                            else
                            {
                                try
                                {
                                    Stopwatch sw2 = new Stopwatch();
                                    sw2.Start();
                                    BermudaResult subresult = null;

                                    if (peerInfo.Equals(Endpoint))
                                    {
                                        subresult = GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);

                                    }
                                    else
                                    {
                                        using (var client = HostEnvironment.GetServiceClient(peerInfo))
                                        {
                                            subresult = client.GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);
                                        }
                                        //subresult = GetDataFromPeer(domain, query, mapreduce, merge, minDate, maxDate, remdepth - 1, command, assignment.PeerEndpoint.Endpoint);
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.Metadata.Initiated = initiated;
                                    subresult.Metadata.Completed = DateTime.Now;
                                    subresult.Metadata.OperationTime = sw2.Elapsed;
                                    results[peerInfo] = CachedData[subqueryHash] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    results[peerInfo] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, reducer, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();

#if DEBUG
                    Trace.WriteLine("Join Time:" + sw.Elapsed);
#endif

                    if (results.Any(x => x.Value.Error != null)) throw new BermudaException("Some nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    if (results.All(x => x.Value.Data == null)) return new BermudaResult { Metadata = new BermudaNodeStatistic { Notes = "No Data" } };

                    //if all results are not the same type throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new BermudaException("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

                    //use the passed combine expression to merge multiple datapoint sets into one

                    var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                    //allItems = results.Values.SelectMany(x => x.DataObject)

                    var totalJson = "[" + string.Join(",", results.Values.Where(x => !string.IsNullOrWhiteSpace(x.Data)).Select(x => x.Data.Trim('[', ']')).Where(x => !string.IsNullOrWhiteSpace(x))) + "]";

                    var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);


                    //var aaa = new JavaScriptSerializer().Deserialize<Datapoint[]>(totalJson);
                    //var ggc = aaa.GroupBy(x => new { x.Id, x.Id2 }).Count();

                    //InvokeSelectManyViaReflectionTheKilla(results.Values.Select(x => x.DataObject), dataType);

                    var mergeFunc = GetMergeFunc(merge, mapreduce, dataType, dataType);
                    if (mergeFunc != null)
                    {
                        //var dataType = "kdsajkdsa";
                        var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                        allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems }); // MergeDatapoints(results.Values.Where(x => x.Data != null).SelectMany(x => x.Data), mergeFunc);
                    }

                    var pagingFunc = GetPagingFunc(paging, dataType);
                    if (pagingFunc != null)
                    {
                        var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                        allItems = pagingInvokeMethod.Invoke(pagingFunc, new object[] { allItems });
                    }

                    //figure out the metadata
                    var finalMetadata = new BermudaNodeStatistic { Notes = "Merged Datapoints in " + sw.Elapsed, NodeId = HostEnvironment.Instance.CurrentInstanceId, ChildNodes = results.Values.Select(x => x.Metadata).ToArray() };

                    var arraylol = ToArrayCollection(allItems, dataType);

                    var json = JsonConvert.SerializeObject(arraylol);
                    //var json = JsonConvert.SerializeObject(allItems);

                    var originalData = makeCursor ? arraylol : null;

                    var finalResult = new BermudaResult { DataType = dataTypeDescriptor, OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = finalMetadata, CacheKey = queryHash };

                    CachedData[queryHash] = finalResult;

                    return finalResult;
                }
            }
            else
            {
                ConcurrentDictionary<string, BermudaResult> results = new ConcurrentDictionary<string, BermudaResult>();
                BermudaNodeStatistic stats = new BermudaNodeStatistic();

                var bucketInterfaces = HostEnvironment.Instance.GetBucketInterfacesForDomain(domain);

                if (!bucketInterfaces.Any()) throw new BermudaException("Data not loaded for: " + domain);
                if (bucketInterfaces.Count() > 1) throw new BermudaException("Multiple buckets not supported by BermudaMapReduce");

                var queryHash = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB SET DATAPOINT results [REMDEPTH:" + remdepth + "]");

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }
                    else
                    {
                        return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_3" }, CacheKey = queryHash };
                    }
                }
                else
                {
                    //Chad: short circuiting to test WCF response time in Azure
                    //return new DatapointResult() { Datapoints = new List<Datapoint>(), CreatedOn = DateTime.Now.Ticks, Metadata = new BermudaNodeStatistic() };


                    //IEnumerable<Datapoint> datapoints = null;
                    object datapoints = null;

                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    Type itemType = null;
                    Type resultType = null;
                    string json = null;

                    foreach (var bucketInterface in bucketInterfaces)
                    {
                        var bucketKey = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                        //see if the cache contains a matching result and return it if it's not outdated
                        BermudaResult cachedDatapoints2;
                        if (!noCache && CachedData.TryGetValue(bucketKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                        {
                            if (CacheTraceMessageLevel < 1) Trace.WriteLine("returned CACHED BLOB DATAPOINT results  [REMDEPTH:" + remdepth + "]");

                            if (useCursor)
                            {
                                if (cachedDatapoints2.OriginalData == null) throw new Exception("Cursor " + cursor + " contains null data");
                                var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                                results[bucketInterface.Name] = GetCursorData(paging2, cachedDatapoints2, dataType);
                                
                            }
                            else
                            {
                                results[bucketInterface.Name] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_4" } };
                                json = cachedDatapoints2.Data;
                            }
                        }
                        else
                        {
                            //get mentions
                            var collections = GetCollections(query, mapreduce);

                            if (collections.Count() > 1) throw new BermudaException("More than one collection specified: " + string.Join(",", collections));

                            var table = collections.FirstOrDefault();

                            var tableName = table == null ? null : table.Source;

                            var raw = bucketInterface.GetData(tableName);
                            //var rawType = raw.GetType();
                            //itemType = ReduceExpressionGeneration.GetTypeOfEnumerable(rawType);
                            itemType = bucketInterface.GetDataType(tableName);
                            var mapreduceFunc = GetMapReduceFunc(mapreduce, itemType, out resultType);
                            var queryFunc = GetFilterFunc(query, itemType);
                            var pagingFunc = GetPagingFunc(paging, resultType);
                    
                            var minDateTicks = minDate.Ticks;
                            var maxDateTicks = maxDate.Ticks;


                            object subresult = raw;
                             
                                //queryFunc == null ?
                                //    raw.AsParallel() :
                                //minDate == DateTime.MinValue && maxDate == DateTime.MaxValue ?
                                //    raw.AsParallel().Where(x => queryFunc) :
                                //    raw.AsParallel().Where(x => x.OccurredOnTicks >= minDateTicks && x.OccurredOnTicks <= maxDateTicks && queryFunc(x, parameters));

                            if (json == null)
                            {
                                if (queryFunc != null)
                                {
                                    var queryFuncInvoke = queryFunc.GetType().GetMethod("Invoke");
                                    subresult = queryFuncInvoke.Invoke(queryFunc, new object[] { subresult });
                                }

                                //reduce them using the passed expression
                                if (mapreduceFunc != null)
                                {
                                    var mapReduceFuncInvoke = mapreduceFunc.GetType().GetMethod("Invoke");
                                    subresult = mapReduceFuncInvoke.Invoke(mapreduceFunc, new object[] { subresult });
                                }

                                if (pagingFunc != null)
                                {
                                    var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                                    subresult = pagingInvokeMethod.Invoke(pagingFunc, new object[] { subresult });
                                }


                                datapoints = subresult;
                            }

                            //format a metadata string
                            if (!args.Contains("-nocount"))
                            {
                                stats.TotalItems = bucketInterface.GetCount(tableName);
                                //stats.FilteredItems = filtered.Count();
                                //stats.ReducedItems = subresult.Count();
                            }

                            //cache the result
                            //results[blobInterface.Name] = new DatapointResult { Datapoints = subresult, CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                            //CachedDatapoints[blobKey] = new DatapointResult { Datapoints = subresult.ToList(), CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                        }
                    }

                    //figure out the metadata
                    //var finalMetadata = "    [@" + AzureInterface.Instance.CurrentInstanceId + "] Calculated Datapoints:\r\n" + string.Join("\r\n", results.Values.Select(x => x.Metadata));

                    stats.NodeId = HostEnvironment.Instance.CurrentInstanceId;
                    stats.Notes = "Computed Datapoints";
                    
                    //Trace.WriteLine("total mentions processed: " + mentionCount);

                    //var datapoints = results.Values.SelectMany(x => x.Datapoints);
                    if (datapoints == null) return new BermudaResult() { Metadata = new BermudaNodeStatistic { Notes = "No Results" } };

                    //foreach (var p in datapoints) if (p.IsCount) p.Value = p.Count;

                    var mergeFunc = GetMergeFunc(merge, mapreduce, itemType, resultType);
                    if (mergeFunc != null)
                    {
                        var mergeFuncInvoke = mergeFunc.GetType().GetMethod("Invoke");
                        datapoints = mergeFuncInvoke.Invoke(mergeFunc, new object[] { datapoints });
                    }

                    stats.LinqExecutionTime = sw.Elapsed;

                    var arraylol = ToArrayCollection(datapoints, resultType);

                    if (json == null && datapoints != null)
                    {
                        json = JsonConvert.SerializeObject(arraylol);
                    }
                    
                    //var json = JsonConvert.SerializeObject(datapoints);
                    var originalData = makeCursor ? arraylol : null;

                    var result = CachedData[queryHash] = new BermudaResult { DataType = LinqRuntimeTypeBuilder.GetTypeKey(resultType), OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = stats  };

                    sw.Stop();

                    return result;
                }
            }
        }
Example #31
        /// <summary>
        /// Downloads blocks from <paramref name="peers"/> in parallel,
        /// using the given <paramref name="blockFetcher"/> function.
        /// </summary>
        /// <param name="peers">A list of peers to download blocks.</param>
        /// <param name="blockFetcher">A function to take demands and a peer, and then
        /// download corresponding blocks.</param>
        /// <param name="singleSessionTimeout">A maximum time to wait each single call of
        /// <paramref name="blockFetcher"/>.  If a call is timed out unsatisfied demands
        /// are automatically retried to fetch from other peers.</param>
        /// <param name="cancellationToken">A cancellation token to observe while waiting
        /// for the task to complete.</param>
        /// <returns>An async enumerable that yields pairs of a fetched block and its source
        /// peer.  It terminates when all demands are satisfied.</returns>
        public async IAsyncEnumerable <Tuple <Block <TAction>, TPeer> > Complete(
            IReadOnlyList <TPeer> peers,
            BlockFetcher blockFetcher,
            TimeSpan singleSessionTimeout,
            [EnumeratorCancellation] CancellationToken cancellationToken = default
            )
        {
            if (!peers.Any())
            {
                throw new ArgumentException("The list of peers must not be empty.", nameof(peers));
            }

            var pool       = new PeerPool(peers);
            var queue      = new AsyncProducerConsumerQueue <Tuple <Block <TAction>, TPeer> >();
            var completion =
                new ConcurrentDictionary <HashDigest <SHA256>, bool>(_satisfiedBlocks);

            await foreach (var hashes in EnumerateChunks(cancellationToken))
            {
                cancellationToken.ThrowIfCancellationRequested();
                IList <HashDigest <SHA256> > hashDigests =
                    hashes is IList <HashDigest <SHA256> > l ? l : hashes.ToList();

                foreach (HashDigest <SHA256> hash in hashDigests)
                {
                    completion.TryAdd(hash, false);
                }

                cancellationToken.ThrowIfCancellationRequested();
                await pool.SpawnAsync(
                    async (peer, ct) =>
                {
                    ct.ThrowIfCancellationRequested();
                    var demands = new HashSet <HashDigest <SHA256> >(hashDigests);
                    try
                    {
                        _logger.Debug(
                            "Request blocks {BlockHashes} to {Peer}...",
                            hashDigests,
                            peer
                            );
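                        // Per-session timeout: the timeout token below is also
                        // cancelled when the outer token fires (see ct.Register).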
                        var timeout = new CancellationTokenSource(singleSessionTimeout);
                        CancellationToken timeoutToken = timeout.Token;
                        timeoutToken.Register(() =>
                                              _logger.Debug("Timed out waiting for a response from {Peer}.", peer)
                                              );
                        ct.Register(() => timeout.Cancel());

                        try
                        {
                            ConfiguredCancelableAsyncEnumerable <Block <TAction> > blocks =
                                blockFetcher(peer, hashDigests, timeoutToken)
                                .WithCancellation(timeoutToken);
                            await foreach (Block <TAction> block in blocks)
                            {
                                _logger.Debug(
                                    "Downloaded a block #{BlockIndex} {BlockHash} " +
                                    "from {Peer}.",
                                    block.Index,
                                    block.Hash,
                                    peer
                                    );

                                if (Satisfy(block))
                                {
                                    await queue.EnqueueAsync(
                                        Tuple.Create(block, peer),
                                        cancellationToken
                                        );
                                }

                                demands.Remove(block.Hash);
                            }
                        }
                        catch (OperationCanceledException e)
                        {
                            if (ct.IsCancellationRequested)
                            {
                                _logger.Error(
                                    e,
                                    "A blockFetcher job (peer: {Peer}) is cancelled.",
                                    peer
                                    );
                                throw;
                            }

                            _logger.Debug(
                                e,
                                "Timed out waiting for a response from {Peer}.",
                                peer
                                );
                        }
                    }
                    finally
                    {
                        if (demands.Any())
                        {
                            _logger.Verbose(
                                "Fetched blocks from {Peer}, but there are still " +
                                "unsatisfied demands ({UnsatisfiedDemandsNumber}) so " +
                                "enqueue them again: {UnsatisfiedDemands}.",
                                peer,
                                demands.Count,
                                demands
                                );
                            Demand(demands, retry: true);
                        }
                        else
                        {
                            _logger.Verbose("Fetched blocks from {Peer}.", peer);
                        }
                    }
                },
                    cancellationToken : cancellationToken
                    );
            }

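            // Drain the queue until every demanded hash is marked complete; DequeueAsync
            // throws InvalidOperationException once the queue has been completed and emptied.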
            while (!completion.All(kv => kv.Value))
            {
                Tuple <Block <TAction>, TPeer> pair;
                try
                {
                    pair = await queue.DequeueAsync(cancellationToken);
                }
                catch (InvalidOperationException)
                {
                    break;
                }

                yield return(pair);

                _logger.Verbose(
                    "Completed a block {BlockIndex} {BlockHash} from {Peer}.",
                    pair.Item1.Index,
                    pair.Item1.Hash,
                    pair.Item2
                    );
                completion[pair.Item1.Hash] = true;
            }

            _logger.Verbose("Completed all blocks ({Number}).", completion.Count);
        }
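
A minimal consumption sketch for Complete, assuming blockCompletion is an instance of the containing class and that a peer list and a FetchBlocksAsync delegate exist (neither appears in the snippet above):

        // Hypothetical usage: drain the enumerable until every demand is satisfied.
        await foreach (var pair in blockCompletion.Complete(
            peers, FetchBlocksAsync, TimeSpan.FromSeconds(15), cancellationToken))
        {
            Block<TAction> block = pair.Item1;
            TPeer source = pair.Item2;
            Console.WriteLine($"Stored block #{block.Index} from {source}.");
        }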
Example #32
0
        public BermudaResult GetData(string domain, IEnumerable<string> blobs, string query, string mapreduce, string merge, DateTime minDate, DateTime maxDate, int remdepth, object[] parameters, string command)
        {
            var args = ParseCommand(command);

            if (remdepth > 0)
            {
                //map
                var blobInterfaces = blobs == null ? AzureInterface.Instance.ListBlobs(domain, minDate.Ticks, maxDate.Ticks) : AzureInterface.Instance.GetBlobInterfacesByNames(domain, blobs);

                var blobSetKey = GetQueryChecksum(domain, string.Join(",", blobInterfaces.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, null);

                //reduce 
                BermudaResult cachedDatapoints;
                if (CachedData.TryGetValue(blobSetKey, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 3) Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
                    return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_1" } };
                }
                else
                {

                    var assignments = PartitionBlobs(domain, blobInterfaces, minDate, maxDate, false, true);

                    if (!assignments.Any()) throw new Exception("Specified dataset not loaded: " + domain);

                    ConcurrentDictionary<IPEndPoint, BermudaResult> results = new ConcurrentDictionary<IPEndPoint, BermudaResult>();
                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    List<Task> tasks = new List<Task>();
                    foreach (var ass in assignments)
                    {
                        Task t = new Task((assObj) =>
                        {
                            ZipMetadata assignment = assObj as ZipMetadata;
                            var initiated = DateTime.Now;
                            var blobSubsetKey = GetQueryChecksum(domain, string.Join(",", assignment.Blobs.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, assignment.PeerEndpoint.ToString());
                            Stopwatch sw3 = new Stopwatch();
                            sw3.Start();

                            //see if the cache contains a matching result and return it if it's not outdated
                            BermudaResult cachedDatapoints2;
                            if (CachedData.TryGetValue(blobSubsetKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                            {
                                if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");
                                results[assignment.PeerEndpoint] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_2" } };
                            }
                            else
                            {
                                try
                                {
                                    Stopwatch sw2 = new Stopwatch();
                                    sw2.Start();
                                    BermudaResult subresult = null;

                                    if (assignment.PeerEndpoint.Equals(Endpoint))
                                    {
                                        subresult = GetData(domain, assignment.Blobs.Select(x => x.Name), query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);

                                    }
                                    else
                                    {
                                        using (var client = AzureInterface.Instance.GetServiceClient(assignment.PeerEndpoint))
                                        {
                                            subresult = client.GetData(domain, query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);
                                        }
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.MetadataObject.Initiated = initiated;
                                    subresult.MetadataObject.Completed = DateTime.Now;
                                    subresult.MetadataObject.OperationTime = sw2.Elapsed;
                                    results[assignment.PeerEndpoint] = CachedData[blobSubsetKey] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    results[assignment.PeerEndpoint] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, ass, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();
                    Trace.WriteLine("Join Time:" + sw.Elapsed);

                    if (results.All(x => x.Value.Error != null)) throw new Exception("All nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    //if all results are not the same type throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new Exception("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

                    //use the passed combine expression to make multiple datapoint sets into one

                    var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                    //allItems = results.Values.SelectMany(x => x.DataObject)

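                    // Naive JSON merge: trim the outer brackets from each node's array
                    // and splice the element lists into one combined array literal.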
                    var totalJson = "[" + string.Join(",", results.Values.Select(x => x.Data.Trim('[', ']'))) + "]";

                    var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);
                        

                    //var aaa = new JavaScriptSerializer().Deserialize<Datapoint[]>(totalJson);
                    //var ggc = aaa.GroupBy(x => new { x.Id, x.Id2 }).Count();

                    //InvokeSelectManyViaReflectionTheKilla(results.Values.Select(x => x.DataObject), dataType);

                    var mergeFunc = GetMergeFunc(merge, mapreduce, dataType);
                    if (mergeFunc != null)
                    {
                        //var dataType = "kdsajkdsa";
                        var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                        allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems }); // MergeDatapoints(results.Values.Where(x => x.Data != null).SelectMany(x => x.Data), mergeFunc);
                    }

                    //figure out the metadata
                    var finalMetadata = new BermudaNodeStatistic { Notes = "Merged Datapoints in " + sw.Elapsed, NodeId = AzureInterface.Instance.CurrentInstanceId, ChildNodes = results.Values.Select(x => x.MetadataObject ).ToArray() };

                    var finalResult = new BermudaResult { DataType = dataTypeDescriptor, DataObject = allItems, CreatedOn = DateTime.Now.Ticks, MetadataObject = finalMetadata };

                    CachedData[blobSetKey] = finalResult;

                    return finalResult;
                }
            }
            else
            {
                ConcurrentDictionary<string, BermudaResult> results = new ConcurrentDictionary<string, BermudaResult>();
                BermudaNodeStatistic stats = new BermudaNodeStatistic();

                var blobInterfaces = AzureInterface.Instance.GetBlobInterfacesByNames(domain, blobs);

                var blobSetKey = GetQueryChecksum(domain, string.Join(",", blobInterfaces.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, Endpoint.ToString());

                BermudaResult cachedDatapoints;
                if (CachedData.TryGetValue(blobSetKey, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB SET DATAPOINT results [REMDEPTH:" + remdepth + "]");
                    return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_3" } };
                }
                else
                {
                    //Chad: short circuiting to test WCF response time in Azure
                    //return new DatapointResult() { Datapoints = new List<Datapoint>(), CreatedOn = DateTime.Now.Ticks, Metadata = new BermudaNodeStatistic() };

                   
                    //IEnumerable<Datapoint> datapoints = null;
                    object datapoints = null;

                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    Type itemType = null;
                    Type resultType = null;

                    foreach (var blobInterface in blobInterfaces)
                    {
                        var blobKey = GetQueryChecksum(domain, blobInterface.Name, query, mapreduce, minDate, maxDate, parameters, Endpoint.ToString());

                        //see if the cache contains a matching result and return it if it's not outdated
                        BermudaResult cachedDatapoints2;
                        if (CachedData.TryGetValue(blobKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                        {
                            if (CacheTraceMessageLevel < 1) Trace.WriteLine("returned CACHED BLOB DATAPOINT results  [REMDEPTH:" + remdepth + "]");
                            results[blobInterface.Name] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_4" } };
                            datapoints = cachedDatapoints2.DataObject;
                        }
                        else
                        {
                            //get mentions
                            var raw = blobInterface.GetData();
                            var rawType = raw.GetType();
                            itemType = ReduceExpressionGeneration.GetTypeOfEnumerable(rawType);
                            var mapreduceFunc = GetMapReduceFunc(mapreduce, itemType, out resultType);
                            var queryFunc = GetFilterFunc(query, itemType);
                    
                            var minDateTicks = minDate.Ticks;
                            var maxDateTicks = maxDate.Ticks;


                            object subresult = raw.AsParallel();
                             
                                //queryFunc == null ?
                                //    raw.AsParallel() :
                                //minDate == DateTime.MinValue && maxDate == DateTime.MaxValue ?
                                //    raw.AsParallel().Where(x => queryFunc) :
                                //    raw.AsParallel().Where(x => x.OccurredOnTicks >= minDateTicks && x.OccurredOnTicks <= maxDateTicks && queryFunc(x, parameters));

                            if (queryFunc != null)
                            {
                                var queryFuncInvoke = queryFunc.GetType().GetMethod("Invoke");
                                subresult = queryFuncInvoke.Invoke(queryFunc, new object[] { subresult });
                            }

                            //reduce them using the passed expression
                            if (mapreduceFunc != null)
                            {
                                var mapReduceFuncInvoke = mapreduceFunc.GetType().GetMethod("Invoke");
                                subresult = mapReduceFuncInvoke.Invoke(mapreduceFunc, new object[] { subresult });
                            }
                            

                            datapoints = subresult;

                            //format a metadata string
                            if (!args.Contains("-nocount"))
                            {
                                //stats.TotalItems = raw.Count();
                                //stats.FilteredItems = filtered.Count();
                                //stats.ReducedItems = subresult.Count();
                            }

                            //cache the result
                            //results[blobInterface.Name] = new DatapointResult { Datapoints = subresult, CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                            //CachedDatapoints[blobKey] = new DatapointResult { Datapoints = subresult.ToList(), CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                        }
                    }

                    //figure out the metadata
                    //var finalMetadata = "    [@" + AzureInterface.Instance.CurrentInstanceId + "] Calculated Datapoints:\r\n" + string.Join("\r\n", results.Values.Select(x => x.Metadata));

                    stats.NodeId = AzureInterface.Instance.CurrentInstanceId;
                    stats.Notes = "Computed Datapoints";
                    
                    //Trace.WriteLine("total mentions processed: " + mentionCount);

                    //var datapoints = results.Values.SelectMany(x => x.Datapoints);
                    if (datapoints == null) return new BermudaResult() { MetadataObject = new BermudaNodeStatistic { Notes = "No Results" } };

                    //foreach (var p in datapoints) if (p.IsCount) p.Value = p.Count;

                    var mergeFunc = resultType == null ? null : GetMergeFunc(merge, mapreduce, resultType);
                    if (mergeFunc != null)
                    {
                        var mergeFuncInvoke = mergeFunc.GetType().GetMethod("Invoke");
                        datapoints = mergeFuncInvoke.Invoke(mergeFunc, new object[] { datapoints });
                    }

                    sw.Stop();

                    stats.LinqExecutionTime = sw.Elapsed;

                    var result = CachedData[blobSetKey] = new BermudaResult { DataType = LinqRuntimeTypeBuilder.GetTypeKey(resultType), DataObject = datapoints, CreatedOn = DateTime.Now.Ticks, MetadataObject = stats  };

                    return result;
                }
            }
        }
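
The method above gates every expensive computation behind a time-stamped cache lookup, and the same check appears several times. A condensed sketch of that pattern (the helper name is hypothetical; the types come from the snippet):

        // Returns a cached result only while it is younger than the cache lifetime.
        static bool TryGetFresh(
            ConcurrentDictionary<string, BermudaResult> cache,
            string key, long lifetimeTicks, out BermudaResult result)
        {
            return cache.TryGetValue(key, out result)
                && (DateTime.Now.Ticks - result.CreatedOn) < lifetimeTicks;
        }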
        public void WeGetAllMessagesEvenThoughRabbitMqRestarts()
        {
            var messages = new ConcurrentDictionary<string, bool>();

            _receiver.Handle<string>(async message =>
            {
                Console.WriteLine($"Received '{message}'");
                await Task.Delay(500);
                messages[message] = true;
            });

            Console.WriteLine("Sending messages...");

            Enumerable.Range(0, 40)
                .Select(i => $"message number {i}")
                .ToList()
                .ForEach(message =>
                {
                    messages[message] = false;
                    _sender.Send(message).Wait();
                });

            Console.WriteLine("Waiting for all messages to have been handled...");

            // restart RabbitMQ while we are receiving messages
            ThreadPool.QueueUserWorkItem(_ =>
            {
                try
                {
                    Thread.Sleep(5000);
                    Console.WriteLine("Stopping RabbitMQ....");
                    Exec("net", "stop rabbitmq");
                    Thread.Sleep(1000);
                    Console.WriteLine("Starting RabbitMQ....");
                    Exec("net", "start rabbitmq");
                }
                catch (Exception exception)
                {
                    throw new AssertionException("Exception on background thread", exception);
                }
            });

            var stopwatch = Stopwatch.StartNew();
            while (true)
            {
                Thread.Sleep(100);

                if (messages.All(kvp => kvp.Value))
                {
                    Console.WriteLine("All messages received :)");
                    break;
                }

                if (stopwatch.Elapsed < TimeSpan.FromSeconds(40)) continue;

                throw new TimeoutException("Waited too long!");
            }
        }
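
The polling loop above generalizes into a small helper that waits for every flag in a ConcurrentDictionary to flip to true; a sketch (the name is hypothetical):

        static void WaitUntilAllTrue(
            ConcurrentDictionary<string, bool> flags, TimeSpan timeout)
        {
            var stopwatch = Stopwatch.StartNew();
            while (!flags.All(kvp => kvp.Value))
            {
                if (stopwatch.Elapsed > timeout)
                    throw new TimeoutException("Waited too long!");
                Thread.Sleep(100);
            }
        }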
        public void ProfilingMD_Ex2()
        {
            using (var c = Create())
            {
                ConnectionMultiplexer conn = c;
                var profiler = new ToyProfiler();

                conn.RegisterProfiler(profiler);

                var threads = new List<Thread>();

                var perThreadTimings = new ConcurrentDictionary<Thread, List<IProfiledCommand>>();

                for (var i = 0; i < 16; i++)
                {
                    var db = conn.GetDatabase(i);

                    var thread =
                        new Thread(
                            delegate()
                            {
                                var threadTasks = new List<Task>();

                                conn.BeginProfiling(Thread.CurrentThread);

                                for (var j = 0; j < 1000; j++)
                                {
                                    var task = db.StringSetAsync("" + j, "" + j);
                                    threadTasks.Add(task);
                                }

                                Task.WaitAll(threadTasks.ToArray());

                                perThreadTimings[Thread.CurrentThread] = conn.FinishProfiling(Thread.CurrentThread).ToList();
                            }
                        );

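                    // Map this thread to its profiling context before it starts
                    // issuing commands, so the profiler can attribute them.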
                    profiler.Contexts[thread] = thread;

                    threads.Add(thread);
                }
                
                threads.ForEach(thread => thread.Start());
                threads.ForEach(thread => thread.Join());

                Assert.AreEqual(16, perThreadTimings.Count);
                Assert.IsTrue(perThreadTimings.All(kv => kv.Value.Count == 1000));
            }
        }
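
The test assumes a ToyProfiler that resolves each thread to its own profiling context; a minimal sketch consistent with the usage above, assuming the StackExchange.Redis 1.x profiling API (IProfiler.GetContext):

        // Resolves the profiling context for the current thread; commands issued
        // on threads without a registered context are not profiled.
        class ToyProfiler : IProfiler
        {
            public readonly ConcurrentDictionary<Thread, object> Contexts =
                new ConcurrentDictionary<Thread, object>();

            public object GetContext()
            {
                object ctx;
                return Contexts.TryGetValue(Thread.CurrentThread, out ctx) ? ctx : null;
            }
        }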