/// <summary>
/// Returns the reviews written by each of the given users, keyed by user id.
/// Users with no reviews are simply absent from the result.
/// </summary>
/// <param name="userIds">Ids of the users whose reviews are requested.</param>
/// <returns>A completed task wrapping a map of user id to that user's reviews.</returns>
public Task <IDictionary <int, Review[]> > GetReviewsByUserIdsAsync(params int[] userIds)
        {
            // A HashSet gives the same O(1) membership test as the original
            // key==value dictionary, fixes the typo'd local name, and does not
            // throw if the caller passes duplicate user ids.
            var wanted = new HashSet<int>(userIds);
            IDictionary<int, Review[]> results =
                _reviews.GroupBy(m => m.Value.UserId, m => m.Value)
                .Where(g => wanted.Contains(g.Key))
                .ToDictionary(g => g.Key, g => g.ToArray());

            return Task.FromResult(results);
        }
Example #2
0
        /// <summary>
        /// Stops the poll: detaches the vote handler, removes the active poll,
        /// tallies the votes and announces the results in the given channel.
        /// </summary>
        /// <param name="ch">Channel to post the results to.</param>
        public async Task StopPoll(Channel ch)
        {
            // Detach the handler first so no further votes are counted.
            NadekoBot.client.MessageReceived -= Vote;
            Poll throwaway;

            // NOTE(review): `e` is not defined in this method's scope — this likely
            // should be `ch.Server`; confirm against the original event-handler code.
            PollCommand.ActivePolls.TryRemove(e.Server, out throwaway);
            try {
                // One entry per answer number (kvp.Value is the voter's chosen
                // answer), value = vote count; displayed in ascending vote order.
                // Count() replaces the original Sum(kvp => 1), which counted by
                // summing 1 per element.
                var results = participants.GroupBy(kvp => kvp.Value)
                              .ToDictionary(x => x.Key, x => x.Count())
                              .OrderBy(kvp => kvp.Value);

                int totalVotesCast = results.Sum(kvp => kvp.Value);
                if (totalVotesCast == 0)
                {
                    await ch.SendMessage("📄 **No votes have been cast.**");

                    return;
                }
                var closeMessage = $"--------------**POLL CLOSED**--------------\n" +
                                   $"📄 , here are the results:\n";
                foreach (var kvp in results)
                {
                    // answers is 1-indexed from the user's point of view.
                    closeMessage += $"`{kvp.Key}.` **[{answers[kvp.Key - 1]}]** has {kvp.Value} votes.({kvp.Value * 1.0f / totalVotesCast * 100}%)\n";
                }

                await ch.SendMessage($"📄 **Total votes cast**: {totalVotesCast}\n{closeMessage}");
            } catch (Exception ex) {
                // Best-effort announcement: log and swallow so shutdown continues.
                Console.WriteLine($"Error in poll game {ex}");
            }
        }
Example #3
0
        /// <summary>
        /// Builds an embed summarizing the poll: the question, one localized line
        /// per answer with its vote count, and the total votes in the footer.
        /// </summary>
        /// <param name="title">Title for the embed.</param>
        /// <returns>The populated <c>EmbedBuilder</c> (not yet built).</returns>
        public EmbedBuilder GetStats(string title)
        {
            // One entry per answer number, value = vote count, most-voted first.
            // Count() replaces the original Sum(kvp => 1), which counted by
            // summing 1 per element.
            var results = _participants.GroupBy(kvp => kvp.Value)
                          .ToDictionary(x => x.Key, x => x.Count())
                          .OrderByDescending(kvp => kvp.Value)
                          .ToArray();

            var eb = new EmbedBuilder().WithTitle(title);

            var sb = new StringBuilder()
                     .AppendLine(Format.Bold(_question))
                     .AppendLine();

            var totalVotesCast = 0;

            if (results.Length == 0)
            {
                sb.AppendLine(GetText("poll_no_votes_cast"));
            }
            else
            {
                foreach (var result in results)
                {
                    // _answers is 0-indexed; answer numbers shown to users are 1-indexed.
                    sb.AppendLine(GetText("poll_result", result.Key, Format.Bold(_answers[result.Key - 1]), Format.Bold(result.Value.ToString())));
                    totalVotesCast += result.Value;
                }
            }

            eb.WithDescription(sb.ToString()).WithFooter(efb => efb.WithText(GetText("poll_x_votes_cast", totalVotesCast)));

            return eb;
        }
Example #4
0
        /// <summary>
        /// Stops the poll: detaches the vote handler, removes the active poll,
        /// tallies the votes and announces the (German) results in the channel.
        /// </summary>
        /// <param name="ch">Channel to post the results to.</param>
        public async Task StopPoll(Channel ch)
        {
            // Detach the handler first so no further votes are counted.
            MidnightBot.Client.MessageReceived -= Vote;
            Poll throwaway;

            // NOTE(review): `e` is not defined in this method's scope — this likely
            // should be `ch.Server`; confirm against the original event-handler code.
            PollCommand.ActivePolls.TryRemove(e.Server, out throwaway);
            try
            {
                // One entry per answer number (kvp.Value is the voter's chosen
                // answer), value = vote count; displayed in ascending vote order.
                // Count() replaces the original Sum(kvp => 1), which counted by
                // summing 1 per element.
                var results = participants.GroupBy(kvp => kvp.Value)
                              .ToDictionary(x => x.Key, x => x.Count())
                              .OrderBy(kvp => kvp.Value);

                var totalVotesCast = results.Sum(kvp => kvp.Value);
                if (totalVotesCast == 0)
                {
                    await ch.SendMessage("📄 **Es wurden keine Stimmen abgegeben**").ConfigureAwait(false);

                    return;
                }
                var closeMessage = $"--------------**Umfrage geschlossen**--------------\n" +
                                   $"📄 , hier sind die Ergebnisse:\n";
                // answers is 0-indexed; answer numbers shown to users are 1-indexed.
                closeMessage = results.Aggregate(closeMessage, (current, kvp) => current + $"`{kvp.Key}.` **[{answers[kvp.Key - 1]}]**" +
                                                 $" hat {kvp.Value} Stimmen." +
                                                 $"({kvp.Value * 1.0f / totalVotesCast * 100}%)\n");

                await ch.SendMessage($"📄 **Gesamte Anzahl an abgegebenen Stimmen**: {totalVotesCast}\n{closeMessage}").ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                // Best-effort announcement: log and swallow so shutdown continues.
                Console.WriteLine($"Fehler bei Umfrage {ex}");
            }
        }
 /// <summary>
 /// Groups the cached missing (context, key) tuples by context and returns,
 /// for each context, the array of keys discovered missing in it.
 /// </summary>
 internal Dictionary <string, string[]> GetDiscoveredMissingKeys()
 {
     var byContext = _missingKeyCache.GroupBy(entry => entry.Key.Item1);
     return byContext.ToDictionary(
         group => group.Key,
         group => group.Select(entry => entry.Key.Item2).ToArray());
 }
Example #6
0
            /// <summary>
            /// Builds an embed summarizing the poll: the question, one numbered line
            /// per answer with its vote count, and the total votes in the footer.
            /// </summary>
            /// <param name="title">Title for the embed.</param>
            /// <returns>The populated <c>EmbedBuilder</c> (not yet built).</returns>
            public EmbedBuilder GetStats(string title)
            {
                // One entry per answer number, value = vote count, most-voted first.
                // Count() replaces the original Sum(kvp => 1), which counted by
                // summing 1 per element.
                var results = participants.GroupBy(kvp => kvp.Value)
                              .ToDictionary(x => x.Key, x => x.Count())
                              .OrderByDescending(kvp => kvp.Value)
                              .ToArray();

                var eb = new EmbedBuilder().WithTitle(title);

                var sb = new StringBuilder()
                         .AppendLine(Format.Bold(question))
                         .AppendLine();

                var totalVotesCast = 0;

                if (results.Length == 0)
                {
                    sb.AppendLine("No votes cast.");
                }
                else
                {
                    for (int i = 0; i < results.Length; i++)
                    {
                        var result = results[i];
                        // Answers is 0-indexed; the displayed rank (i + 1) is 1-indexed.
                        sb.AppendLine($"`{i + 1}.` {Format.Bold(Answers[result.Key - 1])} with {Format.Bold(result.Value.ToString())} votes.");
                        totalVotesCast += result.Value;
                    }
                }

                eb.WithDescription(sb.ToString())
                .WithFooter(efb => efb.WithText(totalVotesCast + " total votes cast."));

                return eb;
            }
Example #7
0
            /// <summary>
            /// Stops the poll: detaches the vote handler, tallies the votes and
            /// announces the results in the channel of the original message.
            /// </summary>
            public async Task StopPoll()
            {
                // Detach the handler first so no further votes are counted.
                NadekoBot.Client.MessageReceived -= Vote;
                try
                {
                    // One entry per answer number (kvp.Value is the voter's chosen
                    // answer), value = vote count, most-voted first. Count() replaces
                    // the original Sum(kvp => 1), which counted by summing 1 per element.
                    var results = participants.GroupBy(kvp => kvp.Value)
                                  .ToDictionary(x => x.Key, x => x.Count())
                                  .OrderByDescending(kvp => kvp.Value);

                    var totalVotesCast = results.Sum(kvp => kvp.Value);
                    if (totalVotesCast == 0)
                    {
                        await originalMessage.Channel.SendMessageAsync("📄 **No votes have been cast.**").ConfigureAwait(false);

                        return;
                    }
                    var closeMessage = $"--------------**POLL CLOSED**--------------\n" +
                                       $"📄 , here are the results:\n";
                    // answers is 0-indexed; answer numbers shown to users are 1-indexed.
                    closeMessage = results.Aggregate(closeMessage, (current, kvp) => current + $"`{kvp.Key}.` **[{answers[kvp.Key - 1]}]**" +
                                                     $" has {kvp.Value} votes." +
                                                     $"({kvp.Value * 1.0f / totalVotesCast * 100}%)\n");

                    await originalMessage.Channel.SendConfirmAsync($"📄 **Total votes cast**: {totalVotesCast}\n{closeMessage}").ConfigureAwait(false);
                }
                catch (Exception ex)
                {
                    // Best-effort announcement: log and swallow so shutdown continues.
                    Console.WriteLine($"Error in poll game {ex}");
                }
            }
Example #8
0
 /// <summary>
 /// Returns a response object from every forwarding hub.
 /// </summary>
 /// <returns>One response object per distinct origin hub, plus this hub's own.</returns>
 private List <IDotNetifyHubResponse> GetAllHubInstances()
 {
     // One instance per distinct origin hub id.
     var forwarded = _responseHubCallerContexts
                     .GroupBy(ctx => ctx.Value.GetOriginConnectionContext().HubId)
                     .Select(group => GetInstance(group.First().Key));

     // Append this hub's own response object to the forwarded ones.
     var instances = forwarded.ToList();
     instances.Add(_hubResponse);
     return instances;
 }
Example #9
0
        /// <summary>
        /// Counts the current connections per transport type and pushes the
        /// serialized summary to all clients.
        /// </summary>
        private Task RefreshStats()
        {
            // Property names `transport` and `count` are part of the JSON payload.
            var stats = participants
                        .GroupBy(p => p.Value.ConnectionType)
                        .Select(g => new { transport = g.First().Value.ConnectionType, count = g.Count() });

            var data = JsonConvert.SerializeObject(stats);
            return Clients.refreshStats(data);
        }
        /// <summary>
        /// Returns the questions created by each of the given creators, keyed by
        /// creator id. Creators with no questions are absent from the result.
        /// </summary>
        /// <param name="creatorIds">Ids of the creators whose questions are requested.</param>
        /// <returns>A completed task wrapping a map of creator id to that creator's questions.</returns>
        public Task <IDictionary <int, Question[]> > GetQuestionsByCreatorsIdsAsync(params int[] creatorIds)
        {
            // A HashSet gives the same O(1) membership test as the original
            // key==value dictionary, fixes the typo'd local name, and does not
            // throw if the caller passes duplicate creator ids.
            var wanted = new HashSet<int>(creatorIds);
            IDictionary<int, Question[]> results =
                _questions.GroupBy(m => m.Value.CreatorId, m => m.Value)
                .Where(g => wanted.Contains(g.Key))
                .ToDictionary(g => g.Key, g => g.ToArray());

            return Task.FromResult(results);
        }
        /// <summary>
        /// Prints a ticket-sales report: buyers who got no tickets, per-buyer
        /// purchase summaries ordered by total spend, and aggregate revenue/timing
        /// statistics.
        /// </summary>
        /// <param name="SalesData">The dictionary of ticket sales</param>
        static void PrintSalesReport(ConcurrentDictionary <Ticket, float> SalesData)
        {
            // Re-generate all potential buyer ids so buyers with zero tickets can be found.
            var allPeople = new List <float>();

            for (float i = 0; i < MaxBuyers; i++)
            {
                allPeople.Add(i);
            }

            // Materialize the buyer ids that did purchase into a set: the original
            // re-enumerated SalesData and ran an O(n) Contains per person (O(n^2) total).
            var buyers = new HashSet <float>(SalesData.Select(y => y.Value));
            var notFound = allPeople.Where(x => !buyers.Contains(x));

            // List those users
            foreach (var notFoundUser in notFound)
            {
                Console.WriteLine($"User {notFoundUser} didn't manage to purchase any tickets");
            }

            // If I re-did this code I'd setup some rule that everyone must at least have one ticket before
            // Users can double up

            // Money Formatting -- https://docs.microsoft.com/en-us/dotnet/standard/base-types/standard-numeric-format-strings#the-currency-c-format-specifier
            var grps = SalesData.GroupBy(x => x.Value);                                                  // Get all the tickets sales and group them by the user id

            foreach (var grpSale in grps.OrderByDescending(x => x.Select(y => y.Key.TicketPrice).Sum())) // Order by the highest combined price
            {                                                                                            // For each user id returned by that grouping display information about the sale(s)
                Console.WriteLine($"User {grpSale.Key} purchased {grpSale.Count()}({String.Join(",", grpSale.Select(x => x.Key.TicketID))}) tickets for a total of {grpSale.Select(x=>x.Key.TicketPrice).Sum().ToString("C", CultureInfo.CurrentCulture)}");
            }

            var totalTickets = SalesData.Select(x => x.Key).LongCount();

            // Placeholder query to get all the pricing data
            var ticketPricingQuery = SalesData.Select(x => x.Key.TicketPrice);

            // Earliest sale start and latest completed sale end, for timing stats.
            var earliestValue = SalesData.OrderBy(x => x.Key.SaleStart).Select(x => x.Key.SaleStart).First();
            var latestValue   = SalesData.Where(x => x.Key.SaleEnd.HasValue).OrderByDescending(x => x.Key.SaleEnd).Select(x => x.Key.SaleEnd.Value).First();

            Console.WriteLine(
                $@"Revenue from ticket sales:
Sum={ticketPricingQuery.Sum().ToString("C", CultureInfo.CurrentCulture)}
Average Ticket Price={ticketPricingQuery.Average().ToString("C", CultureInfo.CurrentCulture)}
Min={ticketPricingQuery.Min().ToString("C", CultureInfo.CurrentCulture)}    Max={ticketPricingQuery.Max().ToString("C", CultureInfo.CurrentCulture)}

Time to Sell={latestValue.Subtract(earliestValue).TotalSeconds} (seconds)
1 ticket per {latestValue.Subtract(earliestValue).TotalSeconds / totalTickets} (seconds)"
                );
        }
Example #12
0
        // Applies a UV morph: maps each UV offset to the material owning its
        // vertex, then issues one move-vertices-by-morph command per material.
        internal void LoadUVMorph(IPXMorph morph)
        {
            // Reject anything that is not a UV morph. (The Japanese message reads:
            // "A morph other than a UV morph was specified.")
            if (!morph.IsUV)
            {
                throw new ArgumentException("UVモーフ以外のモーフが指定されました。");
            }

            // Seed the map with every offset pointing at a null material; the
            // values are filled in concurrently below.
            var materialMap = new ConcurrentDictionary <IPXUVMorphOffset, Material>(morph.Offsets.ToDictionary(o => (IPXUVMorphOffset)o, _ => (Material)null));

            // Resolve offset -> owning material in parallel.
            // NOTE(review): First() throws if some offset's vertex belongs to no
            // material — presumably every vertex is owned; confirm.
            materialMap.Keys.AsParallel().ForAll(offset =>
            {
                materialMap[offset] = Materials.First(m => m.Vertices.Contains(offset.Vertex));
            });

            // Bucket the offsets per material (key = material, elements = offsets).
            var offsetsGroupByMaterial = materialMap.GroupBy(p => p.Value, p => p.Key);

            foreach (var offsetGroup in offsetsGroupByMaterial)
            {
                Do(offsetGroup.Key, new CommandMoveVerticesByMorph(offsetGroup));
            }
        }
        /// <summary>
        /// Validates that, when no rebuild is requested, every projection in each
        /// slot sits at the same checkpoint value; returns one error message per
        /// inconsistent slot.
        /// </summary>
        /// <returns>List of human-readable error messages (empty when consistent).</returns>
        public List <string> GetCheckpointErrors()
        {
            List <string> errors = new List <string>();

            if (RebuildSettings.ShouldRebuild == false)
            {
                //need to check every slot for new projection
                var slots = _projectionToSlot.GroupBy(k => k.Value, k => k.Key);
                foreach (var slot in slots)
                {
                    // Materialize once: the original deferred query was re-enumerated
                    // by Min, Max and the Distinct check below.
                    var allCheckpoints = _checkpointTracker
                                         .Where(k => slot.Contains(k.Key))
                                         .ToArray();

                    var minCheckpoint = allCheckpoints.Min(k => k.Value);
                    var maxCheckpoint = allCheckpoints.Max(k => k.Value);

                    if (minCheckpoint != maxCheckpoint)
                    {
                        String error;
                        //error, we have not all projections at the same checkpoint
                        if (allCheckpoints
                            .Where(k => k.Value != "0")
                            .Select(k => k.Value)
                            .Distinct().Count() == 1)
                        {
                            //We have one or more new projection in slot
                            error = String.Format("Error in slot {0}: we have new projections at checkpoint 0, rebuild needed!", slot.Key);
                        }
                        else
                        {
                            // NOTE(review): "reamodel" below looks like a typo for
                            // "readmodel"; message kept byte-identical on purpose.
                            error = String.Format("Error in slot {0}, not all projection at the same checkpoint value. Please check reamodel db!", slot.Key);
                        }
                        errors.Add(error);
                    }
                }
            }
            return(errors);
        }
Example #14
0
        /// <summary>
        /// Returns the compiled view templates grouped by (lower-cased) context,
        /// each mapping template URL to either the split or the raw template body.
        /// </summary>
        /// <param name="splitted">When true, return each template in split form; otherwise its raw text.</param>
        /// <param name="area">Unused; kept for interface compatibility.</param>
        public Dictionary <string, object> GetCompiledViews(bool splitted = false, string area = null)
        {
            var dictionary       = new Dictionary <string, object>();
            var groupedTemplates = _parsedTemplateCache.GroupBy(x => x.Key.Context);

            foreach (var gt in groupedTemplates)
            {
                // BUG FIX: the original checked ContainsKey(gt.Key) but inserted
                // gt.Key.ToLower(), so two contexts differing only in case hit
                // Add() twice and threw ArgumentException. The indexer assignment
                // below both adds and overwrites, making the pre-check unnecessary.
                var key = gt.Key.ToLower();
                dictionary[key] = gt.ToDictionary(x => x.Key.Url, x =>
                {
                    if (splitted)
                    {
                        return (x.Value.ToSplited());
                    }
                    return ((object)x.Value.Raw);
                });
            }
            return (dictionary);
        }
        /// <summary>
        /// Builds a snapshot of recorded activity: per-action call slots (most
        /// called first), the user/last-seen map, counts per facility prefix and
        /// per hour, and the distinct-user total.
        /// </summary>
        public ActivitySummary Summarize()
        {
            var summary = new ActivitySummary();

            // One slot per action, accumulating every sample; the query stays
            // deferred (not materialized), exactly as before.
            summary.Actions = calls
                .Select(entry =>
                {
                    var activity = new ActivitySlot { Action = entry.Key };
                    foreach (var sample in entry.Value)
                    {
                        activity.Tick(sample.Calls, sample.TotalMsecs);
                    }
                    return activity;
                })
                .OrderByDescending(activity => activity.Calls);

            summary.Users = new ReadOnlyDictionary <string, DateTime>(users);

            // First five characters of the user key identify the facility.
            summary.ByFacility = users
                .GroupBy(user => user.Key.Substring(0, 5))
                .ToDictionary(facility => facility.Key, facility => facility.Count());

            summary.UsersByHours      = UsersByHours.ToDictionary(hour => hour.Key.ToString(), hour => hour.Value);
            summary.DistinctUserCount = distinctUsers.Count;
            return summary;
        }
Example #16
0
        /// <summary>
        /// Hashes every known file with MD5 and groups the file paths by identical
        /// hash; files whose hashing fails are logged and skipped.
        /// </summary>
        /// <returns>Map from MD5 hash to the full paths of files sharing it.</returns>
        public Dictionary <byte[], List <string> > GetDuplicates()
        {
            var fileHashes = new ConcurrentDictionary <string, byte[]>();

            foreach (var file in _fileInfos)
            {
                try
                {
                    var hash = CreateMD5(file);
                    fileHashes.TryAdd(file.FullName, hash);
                    _messager.LogInfo($"{ByteArrayToString(hash),-35}{file.Name}");
                }
                catch (Exception ex)
                {
                    // Skip unreadable files but keep processing the rest.
                    _messager.LogError(ex, $"Problem when creating MD5 from file {file.FullName}");
                }
            }

            // Byte arrays need a structural comparer to group equal hashes.
            var duplicates = fileHashes
                .GroupBy(pair => pair.Value, new BitArrayComparer())
                .ToDictionary(
                    group => group.Key,
                    group => group.Select(pair => pair.Key).ToList());

            return duplicates;
        }
Example #17
0
        public BermudaResult GetData(string domain, string query, string mapreduce, string merge, string paging, int remdepth, string command, string cursor, string paging2)
        {
            var args = ParseCommand(command);
            bool noCache = args.Contains("-nocache");
            bool makeCursor = cursor == MakeCursorToken;
            bool useCursor = !makeCursor && !string.IsNullOrWhiteSpace(cursor);

            DateTime minDate = DateTime.MinValue;
            DateTime maxDate = DateTime.MaxValue;

            if (remdepth > 0)
            {
                //map
                var queryHash = cursor ?? GetQueryHash(domain, query, mapreduce, merge, paging, null);

                //reduce 
                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
#if DEBUG
                    if (CacheTraceMessageLevel < 3) Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
#endif

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }
                    else
                    {
                        return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_1" }, CacheKey = cachedDatapoints.CacheKey };
                    }
                }
                else
                {
                    if (useCursor) throw new Exception("Cursor " + cursor + " not found");
                    //var assignments = PartitionBlobs(domain, blobInterfaces, minDate, maxDate, false, true);

                    var reducers = HostEnvironment.Instance.GetAvailablePeerConnections();

                    if (!reducers.Any()) throw new Exception("Specified dataset not loaded: " + domain);

                    ConcurrentDictionary<PeerInfo, BermudaResult> results = new ConcurrentDictionary<PeerInfo, BermudaResult>();
                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    List<Task> tasks = new List<Task>();
                    foreach (var reducer in reducers)
                    {
                        Task t = new Task((peerObj) =>
                        {
                            var peerInfo = peerObj as PeerInfo;
                            var initiated = DateTime.Now;
                            var subqueryHash = GetQueryHash(domain, query, mapreduce, merge, paging, peerInfo.ToString());
                            Stopwatch sw3 = new Stopwatch();
                            sw3.Start();

                            //see if the cache contains a matching result and return it if it's not outdated
                            BermudaResult cachedDatapoints2;
                            if (!noCache && CachedData.TryGetValue(subqueryHash, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                            {
                                if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");

                                BermudaResult res = null;

                                if (useCursor) 
                                {
                                    var dataType2 = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                                    res = GetCursorData(paging2, cachedDatapoints2, dataType2);
                                }
                                else 
                                {
                                    res = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_2" } };
                                }
                                
                                results[peerInfo] = res;
                            }
                            else
                            {
                                try
                                {
                                    Stopwatch sw2 = new Stopwatch();
                                    sw2.Start();
                                    BermudaResult subresult = null;

                                    if (peerInfo.Equals(Endpoint))
                                    {
                                        subresult = GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);

                                    }
                                    else
                                    {
                                        using (var client = HostEnvironment.GetServiceClient(peerInfo))
                                        {
                                            subresult = client.GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);
                                        }
                                        //subresult = GetDataFromPeer(domain, query, mapreduce, merge, minDate, maxDate, remdepth - 1, command, assignment.PeerEndpoint.Endpoint);
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.Metadata.Initiated = initiated;
                                    subresult.Metadata.Completed = DateTime.Now;
                                    subresult.Metadata.OperationTime = sw2.Elapsed;
                                    results[peerInfo] = CachedData[subqueryHash] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    results[peerInfo] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, reducer, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();

#if DEBUG
                    Trace.WriteLine("Join Time:" + sw.Elapsed);
#endif

                    if (results.Any(x => x.Value.Error != null)) throw new BermudaException("Some nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    if (results.All(x => x.Value.Data == null)) return new BermudaResult { Metadata = new BermudaNodeStatistic { Notes = "No Data" } };

                    //if all results are not the same time throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new BermudaException("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

                    //use the passed combine espression to make multiple datapoint sets into one

                    var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                    //allItems = results.Values.SelectMany(x => x.DataObject)

                    var totalJson = "[" + string.Join(",", results.Values.Where(x => !string.IsNullOrWhiteSpace(x.Data)).Select(x => x.Data.Trim('[', ']')).Where(x => !string.IsNullOrWhiteSpace(x))) + "]";

                    var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);


                    //var aaa = new JavaScriptSerializer().Deserialize<Datapoint[]>(totalJson);
                    //var ggc = aaa.GroupBy(x => new { x.Id, x.Id2 }).Count();

                    //InvokeSelectManyViaReflectionTheKilla(results.Values.Select(x => x.DataObject), dataType);

                    var mergeFunc = GetMergeFunc(merge, mapreduce, dataType, dataType);
                    if (mergeFunc != null)
                    {
                        //var dataType = "kdsajkdsa";
                        var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                        allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems }); // MergeDatapoints(results.Values.Where(x => x.Data != null).SelectMany(x => x.Data), mergeFunc);
                    }

                    var pagingFunc = GetPagingFunc(paging, dataType);
                    if (pagingFunc != null)
                    {
                        var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                        allItems = pagingInvokeMethod.Invoke(pagingFunc, new object[] { allItems });
                    }

                    //figure out the metadata
                    var finalMetadata = new BermudaNodeStatistic { Notes = "Merged Datapoints in " + sw.Elapsed, NodeId = HostEnvironment.Instance.CurrentInstanceId, ChildNodes = results.Values.Select(x => x.Metadata).ToArray() };

                    var arraylol = ToArrayCollection(allItems, dataType);

                    var json = JsonConvert.SerializeObject(arraylol);
                    //var json = JsonConvert.SerializeObject(allItems);

                    var originalData = makeCursor ? arraylol : null;

                    var finalResult = new BermudaResult { DataType = dataTypeDescriptor, OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = finalMetadata, CacheKey = queryHash };

                    CachedData[queryHash] = finalResult;

                    return finalResult;
                }
            }
            else
            {
                ConcurrentDictionary<string, BermudaResult> results = new ConcurrentDictionary<string, BermudaResult>();
                BermudaNodeStatistic stats = new BermudaNodeStatistic();

                var bucketInterfaces = HostEnvironment.Instance.GetBucketInterfacesForDomain(domain);

                if (!bucketInterfaces.Any()) throw new BermudaException("Data not loaded for: " + domain);
                if (bucketInterfaces.Count() > 1) throw new BermudaException("Multiple buckets not supported by BermudaMapReduce");

                var queryHash = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB SET DATAPOINT results [REMDEPTH:" + remdepth + "]");

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }
                    else
                    {
                        return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_3" }, CacheKey = queryHash };
                    }
                }
                else
                {
                    //Chad: short circuiting to test WCF response time in Azure
                    //return new DatapointResult() { Datapoints = new List<Datapoint>(), CreatedOn = DateTime.Now.Ticks, Metadata = new BermudaNodeStatistic() };


                    //IEnumerable<Datapoint> datapoints = null;
                    object datapoints = null;

                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    Type itemType = null;
                    Type resultType = null;
                    string json = null;

                    foreach (var bucketInterface in bucketInterfaces)
                    {
                        var bucketKey = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                        //see if the cache contains a matching result and return it if it's not outdated
                        BermudaResult cachedDatapoints2;
                        if (!noCache && CachedData.TryGetValue(bucketKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                        {
                            if (CacheTraceMessageLevel < 1) Trace.WriteLine("returned CACHED BLOB DATAPOINT results  [REMDEPTH:" + remdepth + "]");

                            if (useCursor)
                            {
                                if (cachedDatapoints2.OriginalData == null) throw new Exception("Cursor " + cursor + " contains null data");
                                var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                                results[bucketInterface.Name] = GetCursorData(paging2, cachedDatapoints2, dataType);
                                
                            }
                            else
                            {
                                results[bucketInterface.Name] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic { Notes = "Cache_Hit_4" } };
                                json = cachedDatapoints2.Data;
                            }
                        }
                        else
                        {
                            //get mentions
                            var collections = GetCollections(query, mapreduce);

                            if (collections.Count() > 1) throw new BermudaException("More than one collection specified: " + string.Join(",", collections));

                            var table = collections.FirstOrDefault();

                            var tableName = table == null ? null : table.Source;

                            var raw = bucketInterface.GetData(tableName);
                            //var rawType = raw.GetType();
                            //itemType = ReduceExpressionGeneration.GetTypeOfEnumerable(rawType);
                            itemType = bucketInterface.GetDataType(tableName);
                            var mapreduceFunc = GetMapReduceFunc(mapreduce, itemType, out resultType);
                            var queryFunc = GetFilterFunc(query, itemType);
                            var pagingFunc = GetPagingFunc(paging, resultType);
                    
                            var minDateTicks = minDate.Ticks;
                            var maxDateTicks = maxDate.Ticks;


                            object subresult = raw;
                             
                                //queryFunc == null ?
                                //    raw.AsParallel() :
                                //minDate == DateTime.MinValue && maxDate == DateTime.MaxValue ?
                                //    raw.AsParallel().Where(x => queryFunc) :
                                //    raw.AsParallel().Where(x => x.OccurredOnTicks >= minDateTicks && x.OccurredOnTicks <= maxDateTicks && queryFunc(x, parameters));

                            if (json == null)
                            {
                                if (queryFunc != null)
                                {
                                    var queryFuncInvoke = queryFunc.GetType().GetMethod("Invoke");
                                    subresult = queryFuncInvoke.Invoke(queryFunc, new object[] { subresult });
                                }

                                //reduce them using the passed expression
                                if (mapreduceFunc != null)
                                {
                                    var mapReduceFuncInvoke = mapreduceFunc.GetType().GetMethod("Invoke");
                                    subresult = mapReduceFuncInvoke.Invoke(mapreduceFunc, new object[] { subresult });
                                }

                                if (pagingFunc != null)
                                {
                                    var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                                    subresult = pagingInvokeMethod.Invoke(pagingFunc, new object[] { subresult });
                                }


                                datapoints = subresult;
                            }

                            //format a metada string
                            if (!args.Contains("-nocount"))
                            {
                                stats.TotalItems = bucketInterface.GetCount(tableName);
                                //stats.FilteredItems = filtered.Count();
                                //stats.ReducedItems = subresult.Count();
                            }

                            //cache the result
                            //results[blobInterface.Name] = new DatapointResult { Datapoints = subresult, CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                            //CachedDatapoints[blobKey] = new DatapointResult { Datapoints = subresult.ToList(), CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                        }
                    }

                    //figure out the metadata
                    //var finalMetadata = "    [@" + AzureInterface.Instance.CurrentInstanceId + "] Calculated Datapoints:\r\n" + string.Join("\r\n", results.Values.Select(x => x.Metadata));

                    stats.NodeId = HostEnvironment.Instance.CurrentInstanceId;
                    stats.Notes = "Computed Datapoints";
                    
                    //Trace.WriteLine("total mentions processed: " + mentionCount);

                    //var datapoints = results.Values.SelectMany(x => x.Datapoints);
                    if (datapoints == null) return new BermudaResult() { Metadata = new BermudaNodeStatistic { Notes = "No Results" } };

                    //foreach (var p in datapoints) if (p.IsCount) p.Value = p.Count;

                    var mergeFunc = GetMergeFunc(merge, mapreduce, itemType, resultType);
                    if (mergeFunc != null)
                    {
                        var mergeFuncInvoke = mergeFunc.GetType().GetMethod("Invoke");
                        datapoints = mergeFuncInvoke.Invoke(mergeFunc, new object[] { datapoints });
                    }

                    stats.LinqExecutionTime = sw.Elapsed;

                    var arraylol = ToArrayCollection(datapoints, resultType);

                    if (json == null && datapoints != null)
                    {
                        json = JsonConvert.SerializeObject(arraylol);
                    }
                    
                    //var json = JsonConvert.SerializeObject(datapoints);
                    var originalData = makeCursor ? arraylol : null;

                    var result = CachedData[queryHash] = new BermudaResult { DataType = LinqRuntimeTypeBuilder.GetTypeKey(resultType), OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = stats  };

                    sw.Stop();

                    return result;
                }
            }
        }
        /// <summary>
        /// Pushes a batch of block graphs to the gateway and, for every block the
        /// gateway acknowledges, marks the matching local job as answered and records
        /// the block's dependencies. If the gateway returns an empty payload the
        /// batch is re-routed through <c>Fallback</c>. All exceptions are logged and
        /// swallowed (best-effort delivery).
        /// </summary>
        /// <param name="blocks">Block graphs keyed by node id; must not be null.</param>
        /// <returns>A task that completes once all acknowledged jobs are stored.</returns>
        private async Task PushLocal(ConcurrentDictionary <ulong, BlockGraphProto> blocks)
        {
            if (blocks == null)
            {
                throw new ArgumentNullException(nameof(blocks));
            }

            if (blocks.IsEmpty)
            {
                return;
            }

            try
            {
                // Materialize once: the original deferred query was enumerated twice
                // (serialization below and the count check), which could observe
                // different snapshots of the concurrent dictionary.
                var payload = blocks.Select(k => k.Value).ToList();

                var response = await httpService.Dial($"{httpService.GatewayUrl}/blockgraphs", Util.SerializeProto(payload));

                if (!response.IsSuccessStatusCode)
                {
                    // Non-success responses are silently ignored (no fallback), as before.
                    return;
                }

                var jToken    = Util.ReadJToken(response, "protobufs");
                var byteArray = Convert.FromBase64String(jToken.Value <string>());

                if (byteArray.Length == 0)
                {
                    // Gateway returned nothing usable; try the fallback route.
                    await Fallback(blocks);
                    return;
                }

                var blockHashes = Util.DeserializeListProto <BlockInfoProto>(byteArray);

                // Only update local job state when the gateway acknowledged every block sent.
                if (!blockHashes.Any() || payload.Count != blockHashes.Count())
                {
                    return;
                }

                foreach (var next in blocks.GroupBy(h => h.Value.Block.Hash))
                {
                    // The group key IS the block hash; no need to re-read it from the first element.
                    var hash     = next.Key;
                    var jobProto = await unitOfWork.Job.GetFirstOrDefault(x => x.Hash.Equals(hash));

                    if (jobProto == null)
                    {
                        continue;
                    }

                    jobProto.Status = JobState.Answered;

                    foreach (var k in next)
                    {
                        BlockGraphProto.AddDependency(jobProto.BlockGraph, k.Value);
                    }

                    jobProto.TotalNodes = next.Count();

                    jobProto.Nodes.AddRange(next.Select(n => n.Key));
                    jobProto.WaitingOn.Clear();

                    await unitOfWork.Job.StoreOrUpdate(jobProto, jobProto.Id);
                }
            }
            catch (Exception ex)
            {
                // Interpolation calls ToString() implicitly; explicit call was redundant.
                logger.LogError($"<<< MissingBlocksProvider.PushLocal >>>: {ex}");
            }
        }
Example #19
0
        /// <summary>
        /// Executes a query against the named domain. With <paramref name="remdepth"/> &gt; 0
        /// the call acts as the map phase and fans the query out to peer reducer nodes,
        /// merging their JSON subresults; with remdepth == 0 it reduces locally against the
        /// loaded bucket. Results are cached by a hash of the query parameters, and a cursor
        /// token can replay paging over a previously cached result set.
        /// </summary>
        /// <param name="domain">Dataset/domain name to query.</param>
        /// <param name="query">Filter expression, compiled via GetFilterFunc.</param>
        /// <param name="mapreduce">Map/reduce expression, compiled via GetMapReduceFunc.</param>
        /// <param name="merge">Merge expression used to combine subresult sets.</param>
        /// <param name="paging">Paging expression applied to the merged set.</param>
        /// <param name="remdepth">Remaining fan-out depth; 0 evaluates locally.</param>
        /// <param name="command">Flag string, e.g. "-nocache" and "-nocount".</param>
        /// <param name="cursor">Existing cursor token, or MakeCursorToken to create one.</param>
        /// <param name="paging2">Paging expression applied when reading through a cursor.</param>
        /// <returns>The serialized (and cached) query result.</returns>
        public BermudaResult GetData(string domain, string query, string mapreduce, string merge, string paging, int remdepth, string command, string cursor, string paging2)
        {
            var  args       = ParseCommand(command);
            bool noCache    = args.Contains("-nocache");
            bool makeCursor = cursor == MakeCursorToken;
            bool useCursor  = !makeCursor && !string.IsNullOrWhiteSpace(cursor);

            DateTime minDate = DateTime.MinValue;
            DateTime maxDate = DateTime.MaxValue;

            if (remdepth > 0)
            {
                // Map phase: check the whole-query cache, then fan out to peers.
                var queryHash = cursor ?? GetQueryHash(domain, query, mapreduce, merge, paging, null);

                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
#if DEBUG
                    if (CacheTraceMessageLevel < 3)
                    {
                        Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
                    }
#endif

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }

                    return new BermudaResult {
                        DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic {
                            Notes = "Cache_Hit_1"
                        }, CacheKey = cachedDatapoints.CacheKey
                    };
                }

                if (useCursor)
                {
                    // A cursor must reference an existing cached result.
                    throw new Exception("Cursor " + cursor + " not found");
                }

                var reducers = HostEnvironment.Instance.GetAvailablePeerConnections();

                if (!reducers.Any())
                {
                    throw new Exception("Specified dataset not loaded: " + domain);
                }

                ConcurrentDictionary <PeerInfo, BermudaResult> results = new ConcurrentDictionary <PeerInfo, BermudaResult>();
                Stopwatch sw = new Stopwatch();
                sw.Start();

                // One long-running task per peer; each consults the per-peer cache first.
                List <Task> tasks = new List <Task>();
                foreach (var reducer in reducers)
                {
                    Task t = new Task((peerObj) =>
                    {
                        var peerInfo     = peerObj as PeerInfo;
                        var initiated    = DateTime.Now;
                        var subqueryHash = GetQueryHash(domain, query, mapreduce, merge, paging, peerInfo.ToString());
                        Stopwatch sw3    = new Stopwatch();
                        sw3.Start();

                        // See if the cache contains a matching result and return it if not outdated.
                        BermudaResult cachedDatapoints2;
                        if (!noCache && CachedData.TryGetValue(subqueryHash, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                        {
                            if (CacheTraceMessageLevel < 2)
                            {
                                Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");
                            }

                            BermudaResult res = null;

                            if (useCursor)
                            {
                                var dataType2 = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                                res           = GetCursorData(paging2, cachedDatapoints2, dataType2);
                            }
                            else
                            {
                                res = new BermudaResult {
                                    DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic {
                                        Notes = "Cache_Hit_2"
                                    }
                                };
                            }

                            results[peerInfo] = res;
                        }
                        else
                        {
                            try
                            {
                                Stopwatch sw2 = new Stopwatch();
                                sw2.Start();
                                BermudaResult subresult = null;

                                if (peerInfo.Equals(Endpoint))
                                {
                                    // This node is itself a reducer: recurse locally with one less hop.
                                    subresult = GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);
                                }
                                else
                                {
                                    using (var client = HostEnvironment.GetServiceClient(peerInfo))
                                    {
                                        subresult = client.GetData(domain, query, mapreduce, merge, paging, remdepth - 1, command, cursor, paging2);
                                    }
                                }

                                sw2.Stop();
                                subresult.CreatedOn              = DateTime.Now.Ticks;
                                subresult.Metadata.Initiated     = initiated;
                                subresult.Metadata.Completed     = DateTime.Now;
                                subresult.Metadata.OperationTime = sw2.Elapsed;
                                results[peerInfo] = CachedData[subqueryHash] = subresult;
                            }
                            catch (Exception ex)
                            {
                                // Record the failure; aggregated and rethrown after the join below.
                                results[peerInfo] = new BermudaResult {
                                    Error = "[Failed Node] " + ex
                                };
                            }
                        }
                    }, reducer, TaskCreationOptions.LongRunning);

                    tasks.Add(t);
                    t.Start();
                }

                Task.WaitAll(tasks.ToArray());

                sw.Stop();

#if DEBUG
                Trace.WriteLine("Join Time:" + sw.Elapsed);
#endif

                if (results.Any(x => x.Value.Error != null))
                {
                    throw new BermudaException("Some nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));
                }

                if (results.All(x => x.Value.Data == null))
                {
                    // BUGFIX: the terminating ';' was previously outside the if block,
                    // leaving "return <expr> }" — a syntax error in the scraped source.
                    return new BermudaResult {
                        Metadata = new BermudaNodeStatistic {
                            Notes = "No Data"
                        }
                    };
                }

                // All subresults must deserialize to the same type before they can be merged.
                if (results.GroupBy(x => x.Value.DataType).Count() > 1)
                {
                    throw new BermudaException("Subresults must all return the same type");
                }

                var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                if (dataTypeDescriptor == null)
                {
                    // BUGFIX: same misplaced-semicolon artifact as above.
                    return new BermudaResult {
                        Error = "Could not determine the merge type, none of the nodes provided type info"
                    };
                }

                var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                // Concatenate each node's JSON array payload into a single array, then deserialize.
                var totalJson = "[" + string.Join(",", results.Values.Where(x => !string.IsNullOrWhiteSpace(x.Data)).Select(x => x.Data.Trim('[', ']')).Where(x => !string.IsNullOrWhiteSpace(x))) + "]";

                var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);

                // Combine the per-node datapoint sets into one using the merge expression.
                // The compiled delegate's type is only known at runtime, hence reflection.
                var mergeFunc = GetMergeFunc(merge, mapreduce, dataType, dataType);
                if (mergeFunc != null)
                {
                    var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                    allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems });
                }

                var pagingFunc = GetPagingFunc(paging, dataType);
                if (pagingFunc != null)
                {
                    var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                    allItems = pagingInvokeMethod.Invoke(pagingFunc, new object[] { allItems });
                }

                var finalMetadata = new BermudaNodeStatistic {
                    Notes = "Merged Datapoints in " + sw.Elapsed, NodeId = HostEnvironment.Instance.CurrentInstanceId, ChildNodes = results.Values.Select(x => x.Metadata).ToArray()
                };

                var arraylol = ToArrayCollection(allItems, dataType);

                var json = JsonConvert.SerializeObject(arraylol);

                // Keep the live objects only when the caller asked for a cursor.
                var originalData = makeCursor ? arraylol : null;

                var finalResult = new BermudaResult {
                    DataType = dataTypeDescriptor, OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = finalMetadata, CacheKey = queryHash
                };

                CachedData[queryHash] = finalResult;

                return finalResult;
            }
            else
            {
                // Reduce phase (remdepth == 0): evaluate the query against the local bucket.
                ConcurrentDictionary <string, BermudaResult> results = new ConcurrentDictionary <string, BermudaResult>();
                BermudaNodeStatistic stats = new BermudaNodeStatistic();

                var bucketInterfaces = HostEnvironment.Instance.GetBucketInterfacesForDomain(domain);

                if (!bucketInterfaces.Any())
                {
                    throw new BermudaException("Data not loaded for: " + domain);
                }
                if (bucketInterfaces.Count() > 1)
                {
                    throw new BermudaException("Multiple buckets not supported by BermudaMapReduce");
                }

                var queryHash = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                BermudaResult cachedDatapoints;
                if (!noCache && CachedData.TryGetValue(queryHash, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 2)
                    {
                        Trace.WriteLine("returned CACHED BLOB SET DATAPOINT results [REMDEPTH:" + remdepth + "]");
                    }

                    if (useCursor)
                    {
                        var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints.DataType);
                        return GetCursorData(paging2, cachedDatapoints, dataType);
                    }

                    return new BermudaResult {
                        DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, Metadata = new BermudaNodeStatistic {
                            Notes = "Cache_Hit_3"
                        }, CacheKey = queryHash
                    };
                }

                object datapoints = null;

                Stopwatch sw = new Stopwatch();
                sw.Start();

                Type   itemType   = null;
                Type   resultType = null;
                string json       = null;

                foreach (var bucketInterface in bucketInterfaces)
                {
                    // NOTE(review): bucketKey is computed from exactly the same arguments as
                    // queryHash above, so it never varies per bucket — confirm this is intended.
                    var bucketKey = GetQueryHash(domain, query, mapreduce, merge, paging, Endpoint.ToString());

                    // See if the cache contains a matching result and return it if not outdated.
                    BermudaResult cachedDatapoints2;
                    if (!noCache && CachedData.TryGetValue(bucketKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                    {
                        if (CacheTraceMessageLevel < 1)
                        {
                            Trace.WriteLine("returned CACHED BLOB DATAPOINT results  [REMDEPTH:" + remdepth + "]");
                        }

                        if (useCursor)
                        {
                            if (cachedDatapoints2.OriginalData == null)
                            {
                                throw new Exception("Cursor " + cursor + " contains null data");
                            }
                            var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(cachedDatapoints2.DataType);
                            results[bucketInterface.Name] = GetCursorData(paging2, cachedDatapoints2, dataType);
                        }
                        else
                        {
                            results[bucketInterface.Name] = new BermudaResult {
                                DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, Metadata = new BermudaNodeStatistic {
                                    Notes = "Cache_Hit_4"
                                }
                            };
                            json = cachedDatapoints2.Data;
                        }
                    }
                    else
                    {
                        var collections = GetCollections(query, mapreduce);

                        if (collections.Count() > 1)
                        {
                            throw new BermudaException("More than one collection specified: " + string.Join(",", collections));
                        }

                        var table = collections.FirstOrDefault();

                        var tableName = table == null ? null : table.Source;

                        var raw = bucketInterface.GetData(tableName);
                        itemType = bucketInterface.GetDataType(tableName);
                        var mapreduceFunc = GetMapReduceFunc(mapreduce, itemType, out resultType);
                        var queryFunc     = GetFilterFunc(query, itemType);
                        var pagingFunc    = GetPagingFunc(paging, resultType);

                        object subresult = raw;

                        if (json == null)
                        {
                            // The compiled delegates are typed dynamically, so invoke via
                            // reflection: filter, then map/reduce, then page.
                            if (queryFunc != null)
                            {
                                var queryFuncInvoke = queryFunc.GetType().GetMethod("Invoke");
                                subresult = queryFuncInvoke.Invoke(queryFunc, new object[] { subresult });
                            }

                            if (mapreduceFunc != null)
                            {
                                var mapReduceFuncInvoke = mapreduceFunc.GetType().GetMethod("Invoke");
                                subresult = mapReduceFuncInvoke.Invoke(mapreduceFunc, new object[] { subresult });
                            }

                            if (pagingFunc != null)
                            {
                                var pagingInvokeMethod = pagingFunc.GetType().GetMethod("Invoke");
                                subresult = pagingInvokeMethod.Invoke(pagingFunc, new object[] { subresult });
                            }

                            datapoints = subresult;
                        }

                        // Counting the bucket can be expensive; honor the -nocount flag.
                        if (!args.Contains("-nocount"))
                        {
                            stats.TotalItems = bucketInterface.GetCount(tableName);
                        }
                    }
                }

                stats.NodeId = HostEnvironment.Instance.CurrentInstanceId;
                stats.Notes  = "Computed Datapoints";

                if (datapoints == null)
                {
                    // BUGFIX: same misplaced-semicolon artifact as in the map branch.
                    return new BermudaResult {
                        Metadata = new BermudaNodeStatistic {
                            Notes = "No Results"
                        }
                    };
                }

                var mergeFunc = GetMergeFunc(merge, mapreduce, itemType, resultType);
                if (mergeFunc != null)
                {
                    var mergeFuncInvoke = mergeFunc.GetType().GetMethod("Invoke");

                    datapoints = mergeFuncInvoke.Invoke(mergeFunc, new object[] { datapoints });
                }

                stats.LinqExecutionTime = sw.Elapsed;

                var arraylol = ToArrayCollection(datapoints, resultType);

                if (json == null && datapoints != null)
                {
                    json = JsonConvert.SerializeObject(arraylol);
                }

                // Keep the live objects only when the caller asked for a cursor.
                var originalData = makeCursor ? arraylol : null;

                var result = CachedData[queryHash] = new BermudaResult {
                    DataType = LinqRuntimeTypeBuilder.GetTypeKey(resultType), OriginalData = originalData, Data = json, CreatedOn = DateTime.Now.Ticks, Metadata = stats
                };

                sw.Stop();

                return result;
            }
        }
Example #20
0
    private static void ProcessingExpeditionEvents() {
      const string template = "ExpeditionEvents";
      var ResultEvents = new ConcurrentDictionary<XElement, ConcurrentBag<HashSet<AssetWithWeight>>>();
      var count = 1;

      var decicions = Assets.Original
          .XPathSelectElements("//Asset[Template='ExpeditionDecision']")
          .Where(f => f.XPathSelectElement("Values/Reward/RewardAssets")?.Elements("Item").Any(r => r.Element("Reward")?.Value != null) ?? false)
          .ToList();

      decicions.AddRange(Assets.Original.XPathSelectElements("//Asset[Template='ExpeditionTrade']"));
      ConsoleWriteHeadline(template + "  Total: " + decicions.Count);

      decicions.AsParallel().ForAll(decicion => {
        ConsoleWriteGUID(decicion.XPathSelectElement("Values/Standard/GUID").Value + " - " + count++);
        foreach (var events in VerasFindExpeditionEvents(decicion.XPathSelectElement("Values/Standard/GUID").Value, new HashSet<String>(), new Details { decicion })) {
          foreach (var item in events) {
            if (ResultEvents.ContainsKey(item.Source)) {
              ResultEvents[item.Source].Add(item.Details);
            }
            else {
              ResultEvents.TryAdd(item.Source, new ConcurrentBag<HashSet<AssetWithWeight>> { item.Details });
            }
          }
        }
      });

      var document = new XDocument();
      document.Add(new XElement(template));
      document.Root.Add(ResultEvents.GroupBy(f => f.Key.XPathSelectElement("Values/Standard/Name").Value)
          .Select(g => g.First())
          .OrderBy(s => { var str = ("000" + s.Value.Count); return str.Substring(str.Length - 4) + " " + s.Key.XPathSelectElement("Values/Standard/GUID").Value; })
          .Select(s => ToXml(s)));
      document.SaveIndent($@"{Program.PathRoot}\Modified\Assets_{template}.xml");
      document.SaveIndent($@"{Program.PathViewer}\Resources\Assets\{template}.xml");

      // Local method: serializes one expedition event (the event asset plus every
      // recorded decision path) into the compact viewer XML layout:
      //   <EE ID><N/><PL><P ID><R><I .../></R><OL><O .../></OL></P>...</PL></EE>
      XElement ToXml(KeyValuePair<XElement, ConcurrentBag<HashSet<AssetWithWeight>>> events) {
        var xRoot = new XElement("EE");
        // Event GUID plus its localized name ("N").
        xRoot.Add(new XAttribute("ID", events.Key.XPathSelectElement("Values/Standard/GUID").Value));
        xRoot.Add(new Description(events.Key.XPathSelectElement("Values/Standard/GUID").Value).ToXml("N"));
        // "PL" = path list: one "P" element per discovered decision chain.
        var xPaths = new XElement("PL");
        xRoot.Add(xPaths);
        foreach (var path in events.Value) {
          var xPath = new XElement("P");
          xPaths.Add(xPath);
          // Path id is the last space-separated token of the first asset's name.
          xPath.Add(new XAttribute("ID", path.First().Asset.XPathSelectElement("Values/Standard/Name").Value.Split(' ').Last()));

          // "R" = reward list taken from the path's first asset.
          var xRewards = new XElement("R");
          xPath.Add(xRewards);
          var rewards = path.First().Asset.XPathSelectElements("Values/Reward/RewardAssets/Item");
          if (rewards.Any()) {
            // Reward entries under Values/Reward: id plus optional amount.
            foreach (var reward in rewards) {
              var xReward = new XElement("I");
              xRewards.Add(xReward);
              xReward.Add(new XAttribute("ID", reward.XPathSelectElement("Reward").Value));
              if (reward.XPathSelectElement("Amount") != null) {
                xReward.Add(new XAttribute("A", reward.XPathSelectElement("Amount").Value));
              }
            }
          }
          else {
            // No reward entries: fall back to ExpeditionTrade goods ...
            var Products = path.First().Asset.XPathSelectElements("Values/ExpeditionTrade/AvailableGoods/Item");
            if (Products.Any()) {
              foreach (var Product in Products) {
                var xReward = new XElement("I");
                xRewards.Add(xReward);
                xReward.Add(new XAttribute("ID", Product.Element("Product").Value));
                if (Product.Element("Amount")?.Value is string value) {
                  xReward.Add(new XAttribute("A", value));
                }
              }
            }
            // ... and ExpeditionTrade items, both emitted as "I" entries.
            var Items = path.First().Asset.XPathSelectElements("Values/ExpeditionTrade/AvailableItems/Item");
            if (Items.Any()) {
              foreach (var Item in Items) {
                var xReward = new XElement("I");
                xRewards.Add(xReward);
                xReward.Add(new XAttribute("ID", Item.Element("Item").Value));
                if (Item.Element("Amount")?.Value is string value) {
                  xReward.Add(new XAttribute("A", value));
                }
              }
            }
          }

          // "OL" = option list: every ExpeditionOption/ExpeditionMapOption on this path.
          var xOptions = new XElement("OL");
          xPath.Add(xOptions);
          foreach (var option in path) {
            if (option.Asset.XPathSelectElement("Template").Value == "ExpeditionOption" ||
                option.Asset.XPathSelectElement("Template").Value == "ExpeditionMapOption") {
              var xOption = new XElement("O");
              // NOTE(review): AddFirst prepends, so options end up in reverse
              // iteration order in the output — confirm this is intended.
              xOptions.AddFirst(xOption);
              xOption.Add(new XAttribute("ID", option.Asset.XPathSelectElement("Values/Standard/GUID").Value));
              var text = new Description(option.Asset.XPathSelectElement("Values/Standard/GUID").Value);
              // Replace generic English "Confirm"/"Cancel" captions with shared description ids.
              if (text.Languages[Data.Languages.English] == "Confirm") {
                text = new Description("145001");
              }
              else if (text.Languages[Data.Languages.English] == "Cancel") {
                text = new Description("145002");
              }
              xOption.Add(text.ToXml("T"));
              // Optional attribute ("OA") associated with the option, if declared.
              if (option.Asset.XPathSelectElement("Values/ExpeditionOption/OptionAttribute")?.Value != null) {
                xOption.Add(new Description(Assets.KeyToIdDict[option.Asset.XPathSelectElement("Values/ExpeditionOption/OptionAttribute").Value]).ToXml("OA"));
              }
              // Requirements ("R"): needed attribute, item/product or item group id, and amount.
              if (option.Asset.XPathSelectElement("Values/ExpeditionOption/Requirements")?.HasElements == true) {
                var xRequirements = new XElement("R");
                xOption.Add(xRequirements);
                foreach (var requirement in option.Asset.XPathSelectElements("Values/ExpeditionOption/Requirements/Item")) {
                  var xItem = new XElement("I");
                  xRequirements.Add(xItem);
                  if (requirement.XPathSelectElement("NeededAttribute")?.Value != null) {
                    xItem.Add(new Description(Assets.KeyToIdDict[requirement.XPathSelectElement("NeededAttribute").Value]).ToXml("NA"));
                  }
                  if (requirement.XPathSelectElement("ItemOrProduct")?.Value != null) {
                    xItem.Add(new XAttribute("ID", requirement.XPathSelectElement("ItemOrProduct").Value));
                  }
                  if (requirement.XPathSelectElement("Amount")?.Value != null) {
                    xItem.Add(new XAttribute("A", requirement.XPathSelectElement("Amount").Value));
                  }
                  // NOTE(review): ItemGroup reuses the "ID" attribute name — if both
                  // ItemOrProduct and ItemGroup exist, XElement.Add throws on the
                  // duplicate attribute; presumably they are mutually exclusive — verify.
                  if (requirement.XPathSelectElement("ItemGroup")?.Value != null) {
                    xItem.Add(new XAttribute("ID", requirement.XPathSelectElement("ItemGroup").Value));
                  }
                }
              }
            }
          }
        }
        return xRoot;
      }

      //local method Find Expedition Events
      List<SourceWithDetailsList> VerasFindExpeditionEvents(string id, HashSet<string> visitedEvents = default, Details mainDetails = default) {
Example #21
0
        /// <summary>
        /// Post-processes the raw Doxygen XML output for the current build:
        /// normalizes template tags, computes friendly UIDs for compounds and
        /// members, deletes unwanted kinds and bogus duplicate namespace files,
        /// relocates members wrongly emitted as "package"-scoped into their real
        /// access sections, rewrites all id/refid attributes, strips copyright
        /// comments and saves the (possibly renamed) files into the processed
        /// output directory.
        /// </summary>
        /// <param name="context">Build context; must carry a ConfigModel under Constants.Config.</param>
        /// <exception cref="ApplicationException">Thrown when the config object is missing from the context.</exception>
        public async Task RunAsync(BuildContext context)
        {
            var config = context.GetSharedObject(Constants.Config) as ConfigModel;

            if (config == null)
            {
                throw new ApplicationException(string.Format("Key: {0} doesn't exist in build context", Constants.Config));
            }

            string inputPath           = StepUtility.GetDoxygenXmlOutputPath(config.OutputPath);
            var    processedOutputPath = StepUtility.GetProcessedXmlOutputPath(config.OutputPath);

            // Start from a clean processed-output directory.
            if (Directory.Exists(processedOutputPath))
            {
                Directory.Delete(processedOutputPath, recursive: true);
            }
            var dirInfo = Directory.CreateDirectory(processedOutputPath);

            // workaround for Doxygen Bug: it generated xml whose encoding is ANSI while the xml meta is encoding='UTF-8'
            // preprocess in string level: fix style for type with template parameter
            Directory.EnumerateFiles(inputPath, "*.xml").AsParallel().ForAll(
                p =>
            {
                var content   = File.ReadAllText(p, Encoding.UTF8);
                content       = TemplateLeftTagRegex.Replace(content, "$1");
                content       = TemplateRightTagRegex.Replace(content, "$1");
                XDocument doc = XDocument.Parse(content);
                doc.Save(p);
            });

            // get friendly uid
            // First pass: map Doxygen-generated ids to human-readable uids and
            // delete files whose compound kind is in the deletion list.
            var uidMapping           = new ConcurrentDictionary <string, string>();
            var compounddefIdMapping = new ConcurrentDictionary <string, string>();
            await Directory.EnumerateFiles(inputPath, "*.xml").ForEachInParallelAsync(
                p =>
            {
                XDocument doc             = XDocument.Load(p);
                var def                   = doc.Root.Element("compounddef");
                var formatedCompoundDefId = string.Empty;
                if (def != null)
                {
                    if (KindToDeletedCollection.Contains(def.Attribute("kind").Value))
                    {
                        File.Delete(p);
                        return(Task.FromResult(1));
                    }
                    var id = def.Attribute("id").Value;
                    formatedCompoundDefId    = def.Element("compoundname").Value.Replace(Constants.NameSpliter, Constants.IdSpliter);
                    uidMapping[id]           = formatedCompoundDefId;
                    compounddefIdMapping[id] = formatedCompoundDefId;
                }
                // Every memberdef also gets a friendly uid derived from its compound.
                foreach (var node in doc.XPathSelectElements("//memberdef[@id]"))
                {
                    var id         = node.Attribute("id").Value;
                    uidMapping[id] = PreprocessMemberUid(node, formatedCompoundDefId);
                }
                return(Task.FromResult(1));
            });

            // workaround for Doxygen Bug: it generated extra namespace for code `public string namespace(){ return ""; }`.
            // so if we find namespace which has same name with class, remove it from index file and also remove its file.
            string    indexFile      = Path.Combine(inputPath, Constants.IndexFileName);
            XDocument indexDoc       = XDocument.Load(indexFile);
            var       duplicateItems = (from ele in indexDoc.Root.Elements("compound")
                                        let uid = (string)ele.Attribute("refid")
                                                  group ele by RegularizeUid(uid) into g
                                                  let duplicate = g.FirstOrDefault(e => (string)e.Attribute("kind") == "namespace")
                                                                  where g.Count() > 1 && duplicate != null
                                                                  select(string) duplicate.Attribute("refid")).ToList();

            // Get duplicate Ids when ignore case
            // Evaluated for its side effect only: drops the duplicate ids from compounddefIdMapping.
            var results       = duplicateItems.Where(id => compounddefIdMapping.ContainsKey(id)).Select(k => compounddefIdMapping.TryRemove(k, out _)).ToList();
            // Friendly ids that collide case-insensitively; their files are renamed below.
            var duplicatedIds = compounddefIdMapping.GroupBy(k => k.Value.ToLower())
                                .Where(g => g.Count() > 1)
                                .Select(kg => kg.Select(kv => kv.Key))
                                .SelectMany(ke => ke).ToList();

            // Second pass: fix the index file, drop duplicate namespace files,
            // reclassify "package" members, rewrite ids and save each file under its friendly name.
            var extendedIdMaping = new ConcurrentDictionary <string, string>();
            await Directory.EnumerateFiles(inputPath, "*.xml").ForEachInParallelAsync(
                p =>
            {
                XDocument doc = XDocument.Load(p);
                if (Path.GetFileName(p) == Constants.IndexFileName)
                {
                    var toBeRemoved = (from item in duplicateItems
                                       select doc.XPathSelectElement($"//compound[@refid='{item}']")).ToList();
                    foreach (var element in toBeRemoved)
                    {
                        element.Remove();
                    }
                }
                else if (duplicateItems.Contains(Path.GetFileNameWithoutExtension(p)))
                {
                    // Skip (and effectively drop) files that belong to the bogus duplicate namespaces.
                    return(Task.FromResult(1));
                }
                else
                {
                    // workaround for Doxygen Bug: https://bugzilla.gnome.org/show_bug.cgi?id=710175
                    // so if we find package section func/attrib, first check its type, if it starts with `public` or `protected`, move it to related section
                    var toBeMoved      = new Dictionary <string, List <XElement> >();
                    var packageMembers = doc.XPathSelectElements("//memberdef[@prot='package']").ToList();
                    foreach (var member in packageMembers)
                    {
                        string kind = (string)member.Parent.Attribute("kind");
                        var type    = member.Element("type");
                        string regulized, access;
                        if (type != null && TryRegularizeReturnType(type.CreateNavigator().InnerXml, out regulized, out access))
                        {
                            if (regulized == string.Empty)
                            {
                                type.Remove();
                            }
                            else
                            {
                                type.ReplaceWith(XElement.Parse($"<type>{regulized}</type>"));
                            }
                            member.Attribute("prot").Value = access;
                            var belongToSection            = GetSectionKind(access, kind);
                            List <XElement> elements;
                            if (!toBeMoved.TryGetValue(belongToSection, out elements))
                            {
                                elements = new List <XElement>();
                                toBeMoved[belongToSection] = elements;
                            }
                            elements.Add(member);
                            member.Remove();
                        }
                    }
                    // Re-attach the moved members to their correct section, creating it when absent.
                    foreach (var pair in toBeMoved)
                    {
                        var section = doc.XPathSelectElement($"//sectiondef[@kind='{pair.Key}']");
                        if (section == null)
                        {
                            section = new XElement("sectiondef", new XAttribute("kind", pair.Key));
                            doc.Root.Element("compounddef").Add(section);
                        }
                        foreach (var c in pair.Value)
                        {
                            section.Add(c);
                        }
                    }
                }
                // Rewrite every refid/id attribute with its friendly uid.
                foreach (var node in doc.XPathSelectElements("//node()[@refid]"))
                {
                    node.Attribute("refid").Value = RegularizeUid(node.Attribute("refid").Value, uidMapping);
                }
                foreach (var node in doc.XPathSelectElements("//node()[@id]"))
                {
                    node.Attribute("id").Value = RegularizeUid(node.Attribute("id").Value, uidMapping);
                }

                // remove copyright comment
                foreach (var node in doc.XPathSelectElements("//para").ToList())
                {
                    if (CopyRightCommentCollection.Contains(node.Value.Trim()))
                    {
                        node.Remove();
                    }
                }

                // Save under the friendly file name; case-colliding names get a
                // type-qualified rename which is recorded in extendedIdMaping.
                string fileName = Path.GetFileNameWithoutExtension(p);
                if (compounddefIdMapping.TryGetValue(fileName, out string formatedFileName))
                {
                    formatedFileName = RegularizeUid(formatedFileName);
                    if (duplicatedIds.Contains(fileName))
                    {
                        fileName = string.Format(Constants.RenamedFormat, formatedFileName, TryGetType(fileName));
                        extendedIdMaping[formatedFileName] = fileName;
                    }
                    else
                    {
                        fileName = formatedFileName;
                    }
                }
                doc.Save(Path.Combine(dirInfo.FullName, fileName + Path.GetExtension(p)));
                return(Task.FromResult(1));
            });

            // Publish the renamed-id map for later build steps.
            context.SetSharedObject(Constants.ExtendedIdMappings, extendedIdMaping);
        }
Example #22
0
        /// <summary>
        /// Distributed query execution. While remdepth &gt; 0 the blob set is
        /// partitioned across peer nodes and evaluated in parallel (recursing
        /// locally or calling the peer via the service client), then the JSON
        /// sub-results are concatenated and merged; at remdepth == 0 the blobs are
        /// filtered, map-reduced and merged in-process via reflection-invoked
        /// compiled expressions. Results are cached in CachedData at blob-set,
        /// blob-subset and single-blob granularity, keyed by a query checksum.
        /// </summary>
        /// <returns>The (possibly cached) BermudaResult; per-node failures are
        /// reported through the Error field rather than thrown, but a fully failed
        /// node set or an unloaded dataset throws.</returns>
        public BermudaResult GetData(string domain, IEnumerable<string> blobs, string query, string mapreduce, string merge, DateTime minDate, DateTime maxDate, int remdepth, object[] parameters, string command)
        {
            var args = ParseCommand(command);

            if (remdepth > 0)
            {
                //map
                var blobInterfaces = blobs == null ? AzureInterface.Instance.ListBlobs(domain, minDate.Ticks, maxDate.Ticks) : AzureInterface.Instance.GetBlobInterfacesByNames(domain, blobs);

                var blobSetKey = GetQueryChecksum(domain, string.Join(",", blobInterfaces.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, null);

                //reduce 
                // Whole-blob-set cache hit short-circuits everything.
                BermudaResult cachedDatapoints;
                if (CachedData.TryGetValue(blobSetKey, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 3) Trace.WriteLine("returned CACHED BLOBS DATAPOINTS results FOR ENTIRE BLOB SET [REMDEPTH:" + remdepth + "]");
                    return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_1" } };
                }
                else
                {

                    var assignments = PartitionBlobs(domain, blobInterfaces, minDate, maxDate, false, true);

                    if (!assignments.Any()) throw new Exception("Specified dataset not loaded: " + domain);

                    ConcurrentDictionary<IPEndPoint, BermudaResult> results = new ConcurrentDictionary<IPEndPoint, BermudaResult>();
                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    // One long-running task per blob partition/peer assignment.
                    List<Task> tasks = new List<Task>();
                    foreach (var ass in assignments)
                    {
                        Task t = new Task((assObj) =>
                        {
                            ZipMetadata assignment = assObj as ZipMetadata;
                            var initiated = DateTime.Now;
                            var blobSubsetKey = GetQueryChecksum(domain, string.Join(",", assignment.Blobs.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, assignment.PeerEndpoint.ToString());
                            Stopwatch sw3 = new Stopwatch();
                            sw3.Start();

                            //see if the cache contains a matching result and return it if it's not outdated
                            BermudaResult cachedDatapoints2;
                            if (CachedData.TryGetValue(blobSubsetKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                            {
                                if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB DATAPOINT results FOR BLOB SUBSET [REMDEPTH:" + remdepth + "]");
                                results[assignment.PeerEndpoint] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_2" } };
                            }
                            else
                            {
                                try
                                {
                                    Stopwatch sw2 = new Stopwatch();
                                    sw2.Start();
                                    BermudaResult subresult = null;

                                    // Local assignments recurse in-process; remote ones go over the wire.
                                    if (assignment.PeerEndpoint.Equals(Endpoint))
                                    {
                                        subresult = GetData(domain, assignment.Blobs.Select(x => x.Name), query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);

                                    }
                                    else
                                    {
                                        using (var client = AzureInterface.Instance.GetServiceClient(assignment.PeerEndpoint))
                                        {
                                            subresult = client.GetData(domain, query, mapreduce, merge, minDate, maxDate, remdepth - 1, parameters, command);
                                        }
                                    }

                                    sw2.Stop();
                                    subresult.CreatedOn = DateTime.Now.Ticks;
                                    subresult.MetadataObject.Initiated = initiated;
                                    subresult.MetadataObject.Completed = DateTime.Now;
                                    subresult.MetadataObject.OperationTime = sw2.Elapsed;
                                    results[assignment.PeerEndpoint] = CachedData[blobSubsetKey] = subresult;
                                }
                                catch (Exception ex)
                                {
                                    // A failed node is recorded, not rethrown; surfaced only if ALL nodes fail.
                                    results[assignment.PeerEndpoint] = new BermudaResult { Error = "[Failed Node] " + ex };
                                }
                            }
                        }, ass, TaskCreationOptions.LongRunning);

                        tasks.Add(t);
                        t.Start();
                    }

                    Task.WaitAll(tasks.ToArray());

                    sw.Stop();
                    Trace.WriteLine("Join Time:" + sw.Elapsed);

                    if (results.All(x => x.Value.Error != null)) throw new Exception("All nodes failed:\r\n" + string.Join("\r\n", results.Select(x => x.Value.Error)));

                    //if all results are not the same time throw an error
                    if (results.GroupBy(x => x.Value.DataType).Count() > 1) throw new Exception("Subresults must all return the same type");

                    var dataTypeDescriptor = results.Select(x => x.Value.DataType).FirstOrDefault(x => x != null);

                    if (dataTypeDescriptor == null) return new BermudaResult { Error = "Could not determine the merge type, none of the nodes provided type info" };

                    //use the passed combine espression to make multiple datapoint sets into one

                    var dataType = LinqRuntimeTypeBuilder.GetTypeFromTypeKey(dataTypeDescriptor);

                    //allItems = results.Values.SelectMany(x => x.DataObject)

                    // Concatenate the per-node JSON arrays into one array before deserializing.
                    var totalJson = "[" + string.Join(",", results.Values.Select(x => x.Data.Trim('[', ']'))) + "]";

                    var allItems = LinqRuntimeTypeBuilder.DeserializeJson(totalJson, dataTypeDescriptor, true);


                    //var aaa = new JavaScriptSerializer().Deserialize<Datapoint[]>(totalJson);
                    //var ggc = aaa.GroupBy(x => new { x.Id, x.Id2 }).Count();

                    //InvokeSelectManyViaReflectionTheKilla(results.Values.Select(x => x.DataObject), dataType);

                    // The merge delegate is built at runtime, so it is invoked via reflection.
                    var mergeFunc = GetMergeFunc(merge, mapreduce, dataType);
                    if (mergeFunc != null)
                    {
                        //var dataType = "kdsajkdsa";
                        var mergeInvokeMethod = mergeFunc.GetType().GetMethod("Invoke");
                        allItems = mergeInvokeMethod.Invoke(mergeFunc, new object[] { allItems }); // MergeDatapoints(results.Values.Where(x => x.Data != null).SelectMany(x => x.Data), mergeFunc);
                    }

                    //figure out the metadata
                    var finalMetadata = new BermudaNodeStatistic { Notes = "Merged Datapoints in " + sw.Elapsed, NodeId = AzureInterface.Instance.CurrentInstanceId, ChildNodes = results.Values.Select(x => x.MetadataObject ).ToArray() };

                    var finalResult = new BermudaResult { DataType = dataTypeDescriptor, DataObject = allItems, CreatedOn = DateTime.Now.Ticks, MetadataObject = finalMetadata };

                    CachedData[blobSetKey] = finalResult;

                    return finalResult;
                }
            }
            else
            {
                // remdepth == 0: evaluate the query locally against the named blobs.
                ConcurrentDictionary<string, BermudaResult> results = new ConcurrentDictionary<string, BermudaResult>();
                BermudaNodeStatistic stats = new BermudaNodeStatistic();

                var blobInterfaces = AzureInterface.Instance.GetBlobInterfacesByNames(domain, blobs);

                var blobSetKey = GetQueryChecksum(domain, string.Join(",", blobInterfaces.Select(x => x.Name)), query, mapreduce, minDate, maxDate, parameters, Endpoint.ToString());

                BermudaResult cachedDatapoints;
                if (CachedData.TryGetValue(blobSetKey, out cachedDatapoints) && (DateTime.Now.Ticks - cachedDatapoints.CreatedOn) < CacheLifetime)
                {
                    if (CacheTraceMessageLevel < 2) Trace.WriteLine("returned CACHED BLOB SET DATAPOINT results [REMDEPTH:" + remdepth + "]");
                    return new BermudaResult { DataType = cachedDatapoints.DataType, Data = cachedDatapoints.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_3" } };
                }
                else
                {
                    //Chad: short circuiting to test WCF response time in Azure
                    //return new DatapointResult() { Datapoints = new List<Datapoint>(), CreatedOn = DateTime.Now.Ticks, Metadata = new BermudaNodeStatistic() };


                    //IEnumerable<Datapoint> datapoints = null;
                    // NOTE(review): datapoints is overwritten on every loop iteration,
                    // so only the LAST blob's result survives — confirm intended.
                    object datapoints = null;

                    Stopwatch sw = new Stopwatch();
                    sw.Start();

                    Type itemType = null;
                    Type resultType = null;

                    foreach (var blobInterface in blobInterfaces)
                    {
                        var blobKey = GetQueryChecksum(domain, blobInterface.Name, query, mapreduce, minDate, maxDate, parameters, Endpoint.ToString());

                        //see if the cache contains a matching result and return it if it's not outdated
                        BermudaResult cachedDatapoints2;
                        if (CachedData.TryGetValue(blobKey, out cachedDatapoints2) && (DateTime.Now.Ticks - cachedDatapoints2.CreatedOn) < CacheLifetime)
                        {
                            if (CacheTraceMessageLevel < 1) Trace.WriteLine("returned CACHED BLOB DATAPOINT results  [REMDEPTH:" + remdepth + "]");
                            results[blobInterface.Name] = new BermudaResult { DataType = cachedDatapoints2.DataType, Data = cachedDatapoints2.Data, MetadataObject = new BermudaNodeStatistic { Notes = "Cache_Hit_4" } };
                            datapoints = cachedDatapoints2.DataObject;
                        }
                        else
                        {
                            //get mentions
                            var raw = blobInterface.GetData();
                            var rawType = raw.GetType();
                            itemType = ReduceExpressionGeneration.GetTypeOfEnumerable(rawType);
                            var mapreduceFunc = GetMapReduceFunc(mapreduce, itemType, out resultType);
                            var queryFunc = GetFilterFunc(query, itemType);

                            var minDateTicks = minDate.Ticks;
                            var maxDateTicks = maxDate.Ticks;


                            object subresult = raw.AsParallel();

                                //queryFunc == null ?
                                //    raw.AsParallel() :
                                //minDate == DateTime.MinValue && maxDate == DateTime.MaxValue ?
                                //    raw.AsParallel().Where(x => queryFunc) :
                                //    raw.AsParallel().Where(x => x.OccurredOnTicks >= minDateTicks && x.OccurredOnTicks <= maxDateTicks && queryFunc(x, parameters));

                            // Filter, then map-reduce; both delegates are runtime-built and invoked via reflection.
                            if (queryFunc != null)
                            {
                                var queryFuncInvoke = queryFunc.GetType().GetMethod("Invoke");
                                subresult = queryFuncInvoke.Invoke(queryFunc, new object[] { subresult });
                            }

                            //reduce them using the passed expression
                            if (mapreduceFunc != null)
                            {
                                var mapReduceFuncInvoke = mapreduceFunc.GetType().GetMethod("Invoke");
                                subresult = mapReduceFuncInvoke.Invoke(mapreduceFunc, new object[] { subresult });
                            }


                            datapoints = subresult;

                            //format a metada string
                            if (!args.Contains("-nocount"))
                            {
                                //stats.TotalItems = raw.Count();
                                //stats.FilteredItems = filtered.Count();
                                //stats.ReducedItems = subresult.Count();
                            }

                            //cache the result
                            //results[blobInterface.Name] = new DatapointResult { Datapoints = subresult, CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                            //CachedDatapoints[blobKey] = new DatapointResult { Datapoints = subresult.ToList(), CreatedOn = DateTime.UtcNow.Ticks, Metadata = stats.Serialize() };
                        }
                    }

                    //figure out the metadata
                    //var finalMetadata = "    [@" + AzureInterface.Instance.CurrentInstanceId + "] Calculated Datapoints:\r\n" + string.Join("\r\n", results.Values.Select(x => x.Metadata));

                    stats.NodeId = AzureInterface.Instance.CurrentInstanceId;
                    stats.Notes = "Computed Datapoints";

                    //Trace.WriteLine("total mentions processed: " + mentionCount);

                    //var datapoints = results.Values.SelectMany(x => x.Datapoints);
                    if (datapoints == null) return new BermudaResult() { MetadataObject = new BermudaNodeStatistic { Notes = "No Results" } };

                    //foreach (var p in datapoints) if (p.IsCount) p.Value = p.Count;

                    var mergeFunc = resultType == null ? null : GetMergeFunc(merge, mapreduce, resultType);
                    if (mergeFunc != null)
                    {
                        var mergeFuncInvoke = mergeFunc.GetType().GetMethod("Invoke");
                        datapoints = mergeFuncInvoke.Invoke(mergeFunc, new object[] { datapoints });
                    }

                    sw.Stop();

                    stats.LinqExecutionTime = sw.Elapsed;

                    var result = CachedData[blobSetKey] = new BermudaResult { DataType = LinqRuntimeTypeBuilder.GetTypeKey(resultType), DataObject = datapoints, CreatedOn = DateTime.Now.Ticks, MetadataObject = stats  };

                    return result;
                }
            }
        }
Example #23
0
 /// <summary>
 /// Log analyzer entry point. Scans the log file given as the single argument
 /// for state-transition lines, prints every captured regex group, tallies
 /// occurrences per state, records the de-duplicated state sequence per thread
 /// id, and prints a summary. Returns 0 on success, -1 on bad arguments or error.
 /// </summary>
 static int Main(string[] args)
 {
     try
     {
         // Exactly one argument is required and it must name an existing file.
         if (args.Length != 1 || !File.Exists(args[0]))
         {
             Console.Error.WriteLine("First argument should be log file for analyze.");
             return -1;
         }
         var logPath = args[0];

         // Captures: (1) thread id, (2) state, (3) name.
         var lineFormat = new Regex(@"^\d\d:\d\d:\d\d\.\d\d\d\t0x\w+\t\w+\t.+\[TI: 0x(\w+)] St:(\w+) N:(.*) M:[\w ]", RegexOptions.Compiled | RegexOptions.ECMAScript);
         var stateCounts = new ConcurrentDictionary <string, ulong>();
         var stateHistory = new ConcurrentDictionary <string, List <string> >();
         ulong matchedCount = 0;

         foreach (var line in File.ReadLines(logPath))
         {
             var match = lineFormat.Match(line);
             if (!match.Success)
             {
                 continue;
             }
             ++matchedCount;
             foreach (Group group in match.Groups)
             {
                 Console.WriteLine(group.Value);
             }
             var state = match.Groups[2].Value;
             // NOTE: the first sighting of a state stores 0 (only later sightings
             // increment), preserving the original tallying behavior.
             stateCounts.AddOrUpdate(state, 0, (_, total) => total + 1);
             var threadId = match.Groups[1].Value;
             // Append the state to this thread's history unless it repeats the last entry.
             stateHistory.AddOrUpdate(threadId, new List <string>(), (_, history) =>
             {
                 if (!Equals(state, history.LastOrDefault()))
                 {
                     history.Add(state);
                 }
                 return history;
             });
         }

         Console.WriteLine("================");
         foreach (var entry in stateCounts)
         {
             Console.WriteLine($"{entry.Key}:{entry.Value}");
         }
         Console.WriteLine($"StsCount: {matchedCount}");
         // Group threads that went through an identical state sequence.
         var sequences = stateHistory
             .GroupBy(kv => string.Join(" ", kv.Value))
             .ToDictionary(g => g.Key, g => string.Join(" ", g.Select(kv => kv.Key)));
         foreach (var sequence in sequences)
         {
             Console.WriteLine($"{sequence.Key}: {sequence.Value}");
         }
         Console.WriteLine("Ended");
     }
     catch (Exception ex)
     {
         Console.Error.WriteLine(ex);
         return -1;
     }
     return 0;
 }
Example #24
0
 /// <summary>
 /// Explicit enumerator over the stored key/value pairs, grouped by key so each
 /// distinct key yields one grouping of its Toc values.
 /// </summary>
 IEnumerator <IGrouping <string, Toc> > IEnumerable <IGrouping <string, Toc> > .GetEnumerator()
 {
     var groupedByKey = _values.GroupBy(pair => pair.Key, pair => pair.Value);
     return groupedByKey.GetEnumerator();
 }
Example #25
0
        /// <summary>
        /// Entry point: analyzes every file in <c>InputDirectory</c> in parallel,
        /// then rebuilds <c>OutputRootDirectory</c> from scratch and writes CSV
        /// summaries for the collected graph families (<c>SOneGraphs</c>,
        /// <c>OptimalGraphs</c>, <c>DdGraphs</c>), organized as
        /// "grade N/C(n; ...)" subdirectories and grouped by node count.
        /// Blocks on a key press before exiting.
        /// </summary>
        public static void Main(string[] args)
        {
            var log = LogManager.GetLogger(MethodBase.GetCurrentMethod().Name);

            if (!Directory.Exists(InputDirectory))
            {
                log.Warn("Папки с данными не существует. Завершение работы приложения.");
                Console.ReadKey(true);
                return;
            }

            // Parallel.ForEach is synchronous and only returns once every
            // iteration has finished, so the original busy-wait on IsCompleted
            // could never loop; the plain call is equivalent.
            Parallel.ForEach(Directory.GetFiles(InputDirectory), AnalyzeFile);

            try
            {
                // Recreate the output tree from scratch so stale results never mix in.
                if (Directory.Exists(OutputRootDirectory))
                {
                    var rootOutput = new DirectoryInfo(OutputRootDirectory);
                    rootOutput.Empty();
                    rootOutput.Delete(true);
                }

                var directoryInfo = Directory.CreateDirectory(OutputRootDirectory);

                foreach (var sOneGraph in SOneGraphs)
                {
                    var gradeRootDirectory = $"grade {sOneGraph.Key.ToString()}";
                    var sub = GetOrCreateSubdirectory(directoryInfo, gradeRootDirectory);

                    // First generator is fixed at 1; the rest are rendered symbolically (s2, s3, ...).
                    var a       = 2;
                    var lastSub = $"C(n; 1, {string.Join(", ", sOneGraph.Value.First().Value.Generators.Skip(1).Select(x => $"s{a++}"))})";

                    // FIX: the original never descended into an already-existing
                    // "C(n; ...)" folder here, dropping these files into the grade
                    // folder instead; now it always resolves the correct subfolder.
                    sub = GetOrCreateSubdirectory(sub, lastSub);

                    var groupsByNodeCount = sOneGraph.Value.GroupBy(x => x.Value.NodesCount).ToList();
                    groupsByNodeCount.Sort((first, second) => first.Key - second.Key);

                    var minNodes = groupsByNodeCount.Min(x => x.Key);
                    var maxNodes = groupsByNodeCount.Max(x => x.Key);

                    foreach (var group in groupsByNodeCount)
                    {
                        File.AppendAllLines(Path.Combine(sub.FullName, $"all_ring_gr{sOneGraph.Key}_n{minNodes}-{maxNodes}.csv"), new[] { group.First().Value.ToString() });
                        a = 1;

                        // Lexicographic order by generator tuple.
                        var sort = group.ToList();
                        sort.Sort((first, second) =>
                        {
                            for (int i = 0; i < first.Value.Generators.Length; i++)
                            {
                                if (first.Value.Generators[i] != second.Value.Generators[i])
                                {
                                    return first.Value.Generators[i] - second.Value.Generators[i];
                                }
                            }

                            return 0;
                        });

                        // Substring(1) turns the leading "s1" into "1", matching the folder name pattern.
                        File.AppendAllLines(Path.Combine(sub.FullName, $"C({group.First().Value.NodesCount}; {string.Join(", ", sOneGraph.Value.First().Value.Generators.Select(x => $"s{a++}")).Substring(1)}).csv"), sort.Select(x => x.Value.ToString()));
                    }
                }

                foreach (var optimalGraph in OptimalGraphs)
                {
                    var gradeRootDirectory = $"grade {optimalGraph.Key.ToString()}";
                    var sub = GetOrCreateSubdirectory(directoryInfo, gradeRootDirectory);

                    var a       = 1;
                    var lastSub = $"C(n; {string.Join(", ", optimalGraph.Value.First().Value.Generators.Select(x => $"s{a++}"))})";

                    // FIX: the original's else branch searched for the existing
                    // "C(n; ...)" folder under the output root instead of under the
                    // grade folder, which threw InvalidOperationException on First().
                    sub = GetOrCreateSubdirectory(sub, lastSub);

                    var groupsByNodeCount = optimalGraph.Value.GroupBy(x => x.Value.NodesCount).ToList();
                    groupsByNodeCount.Sort((first, second) => first.Key - second.Key);

                    var minNodes = groupsByNodeCount.Min(x => x.Key);
                    var maxNodes = groupsByNodeCount.Max(x => x.Key);

                    foreach (var group in groupsByNodeCount)
                    {
                        // Keep only graphs with minimal diameter, then minimal average
                        // path length among those, ordered by generator tuple.
                        var minDiam       = group.Min(x => x.Value.Diameter);
                        var filteredGroup = group.Where(x => x.Value.Diameter <= minDiam).ToList();
                        var minAvg        = filteredGroup.Min(x => x.Value.AverageLength);
                        filteredGroup = filteredGroup.Where(x => x.Value.AverageLength <= minAvg).ToList();
                        filteredGroup.Sort((first, second) =>
                        {
                            for (int i = 0; i < first.Value.Generators.Length; i++)
                            {
                                if (first.Value.Generators[i] != second.Value.Generators[i])
                                {
                                    return first.Value.Generators[i] - second.Value.Generators[i];
                                }
                            }

                            return 0;
                        });

                        File.AppendAllLines(Path.Combine(sub.FullName, $"all_optCirc_gr{optimalGraph.Key}_n{minNodes}-{maxNodes}.csv"), new[] { filteredGroup.First().Value.ToString() });
                        a = 1;
                        File.AppendAllLines(Path.Combine(sub.FullName, $"C({group.First().Value.NodesCount}; {string.Join(", ", filteredGroup.First().Value.Generators.Select(x => $"s{a++}"))}).csv"), filteredGroup.Select(x => x.Value.ToString()));
                    }
                }

                // grade 2
                if (DdGraphs != null && DdGraphs.Any())
                {
                    var sub = GetOrCreateSubdirectory(directoryInfo, "grade 2");
                    sub = GetOrCreateSubdirectory(sub, "C(n; D, D-1)");

                    var groupsByNodeCount = DdGraphs.GroupBy(x => x.Value.NodesCount).ToList();
                    groupsByNodeCount.Sort((first, second) => first.Key - second.Key);

                    var minNodes = groupsByNodeCount.Min(x => x.Key);
                    var maxNodes = groupsByNodeCount.Max(x => x.Key);

                    foreach (var group in groupsByNodeCount)
                    {
                        // NOTE(review): the original also built a min-diameter /
                        // min-average-length subset here but never used it — the line
                        // written below always takes the first graph of the group.
                        // Dead computation removed; confirm the first graph is intended.
                        File.AppendAllLines(Path.Combine(sub.FullName, $"all_optCirc_gr{2}_n{minNodes}-{maxNodes}.csv"), new[] { group.First().Value.ToString() });
                    }
                }
            }
            catch (Exception ex)
            {
                log.Error("Не удалось удалить папку с предыдущими результатами. Отмена операции.", ex);
                Console.ReadKey(true);

                return;
            }

            Console.ReadKey(true);
        }

        /// <summary>
        /// Returns the child directory <paramref name="name"/> of
        /// <paramref name="parent"/>, creating it when it does not exist yet.
        /// </summary>
        private static DirectoryInfo GetOrCreateSubdirectory(DirectoryInfo parent, string name)
        {
            var existing = parent.GetDirectories().FirstOrDefault(x => x.Name.Equals(name));
            return existing ?? parent.CreateSubdirectory(name);
        }
Example #26
0
        /// <summary>
        /// Reads a JSON-lines metrics file (one JSON object per line; the metric
        /// names "http_req_duration" / "vus" / "checks" suggest k6 load-test
        /// output — confirm) and builds plot traces from it.
        /// Returns the measured throughput trace (requests/s averaged per minute);
        /// the out parameters receive a linear-fit throughput trace, a
        /// virtual-user trace, the overall start/end times of the run, and the
        /// time at which measured throughput starts falling away from the fit.
        /// </summary>
        private static Scatter GetTraceFromJson(string filePath, out Scatter fitTrace, out Scatter vusTrace,
                                                out DateTime startTime, out DateTime endTime, out DateTime fitTime)
        {
            // Parse every line of the file as a JSON object, in parallel.
            var lines = File.ReadLines(filePath).AsParallel().WithDegreeOfParallelism(Environment.ProcessorCount)
                        .Select(JsonObject.Parse).ToList();
            // Only "Point" entries carry metric samples.
            var pointsList = lines.Where(it =>
                                         it["type"] == "Point"
                                         ).ToList();
            // Trailing 30 s are trimmed off the run (presumably a graceful-stop
            // window — confirm against the test configuration).
            var gracefulTimespan = TimeSpan.FromSeconds(30);

            startTime = pointsList.Min(it => ParseShortestXsdDateTime(it.Object("data")["time"]))
                        .Floor(TimeSpan.FromSeconds(1));
            endTime = pointsList.Max(it => ParseShortestXsdDateTime(it.Object("data")["time"]))
                      .Subtract(gracefulTimespan);
            // One zero-initialized counter per whole second of the run.
            var rpsList = new ConcurrentDictionary <int, int>(Enumerable
                                                              .Range(0, (int)Math.Ceiling((endTime - startTime).TotalSeconds) + 1)
                                                              .Select(it => it).ToDictionary(it => it, it => 0));
            var httpReqDurationList = pointsList.Where(it => it["metric"] == "http_req_duration")
                                      .ToList();
            // Local copy: out parameters cannot be captured inside the lambda below.
            var tempStartTime = startTime;

            // Count each request in the second bucket where it COMPLETED
            // (sample time + its duration); samples past the end are dropped.
            Parallel.ForEach(httpReqDurationList, httpReqDuration =>
            {
                var startIndex =
                    (ParseShortestXsdDateTime(httpReqDuration.Object("data")["time"])
                     .Floor(TimeSpan.FromSeconds(1)) - tempStartTime).TotalSeconds;
                var duration = TimeSpan
                               .FromMilliseconds(double.Parse(httpReqDuration.Object("data")["value"])).TotalSeconds;
                var endIndex = (int)Math.Floor(startIndex + duration);
                if (!rpsList.ContainsKey(endIndex))
                {
                    return;
                }

                // NOTE(review): read-modify-write on the indexer is not atomic even
                // on a ConcurrentDictionary; concurrent updates to the same second
                // can be lost — confirm whether approximate counts are acceptable.
                rpsList[endIndex] = rpsList[endIndex] + 1;
            });
            // Average the per-second counts into per-minute buckets (key = minute index).
            var rpsTraceDic = rpsList.GroupBy(it => it.Key / 60).ToDictionary(it => it.Key, it => it.Average(
                                                                                  it => it.Value));

            // Drop an empty trailing minute. NOTE(review): Count - 1 is used as a
            // KEY here; that only matches the last minute because keys are the
            // contiguous range 0..Count-1 — confirm this invariant holds.
            if (rpsTraceDic[rpsTraceDic.Count - 1] == 0)
            {
                rpsTraceDic.Remove(rpsTraceDic.Count - 1);
            }

            var x        = rpsTraceDic.Select(it => tempStartTime.AddMinutes(it.Key)).ToList();
            var rpsTrace = new Scatter()
            {
                x     = x,
                y     = rpsTraceDic.Values.ToList(),
                name  = "Throughput(request/s)",
                yaxis = "y2"
            };

            var limitx = -1;

            // Find the first minute where adding one more point flattens the fitted
            // slope by more than 0.1, i.e. where throughput stops growing linearly.
            for (var i = 2; i < rpsTraceDic.Count - 1; i++)
            {
                var(_, tempk1) = Fit.Line(rpsTraceDic.Keys.Take(i).Select(x => (double)x).ToArray(),
                                          rpsTraceDic.Values.Take(i).ToArray());
                var(_, tempk2) = Fit.Line(rpsTraceDic.Keys.Take(i + 1).Select(x => (double)x).ToArray(),
                                          rpsTraceDic.Values.Take(i + 1).ToArray());
                if (!((tempk1 - tempk2) > 0.1))
                {
                    continue;
                }

                limitx = i;
                break;
            }

            // NOTE(review): if no break point was found, limitx stays -1, fitTime is
            // set to one minute BEFORE the start, and Take(-1) below hands Fit.Line
            // empty arrays — likely an unhandled edge case; confirm.
            fitTime   = tempStartTime.AddMinutes(limitx);
            var(b, k) = Fit.Line(rpsTraceDic.Keys.Take(limitx).Select(x => (double)x).ToArray(),
                                 rpsTraceDic.Values.Take(limitx).ToArray());

            // Extrapolate the pre-break linear trend over the whole run.
            fitTrace = new Scatter()
            {
                x     = x,
                y     = rpsTraceDic.Keys.Select(i => k * i + b).ToList(),
                name  = "Expected throughput(request/s)",
                yaxis = "y2"
            };

            // Per-minute maximum of the "vus" (virtual users) samples.
            var vusData = lines.Where(it =>
                                      it["type"] == "Point" &&
                                      it["metric"] == "vus").Select(it =>
                                                                    new Tuple <DateTime, int>(ParseShortestXsdDateTime(it.Object("data")["time"]),
                                                                                              int.Parse(it.Object("data")["value"])))
                          .OrderBy(it => it.Item1).GroupBy(it => it.Item1.Floor(TimeSpan.FromMinutes(1)))
                          .Select(it => new Tuple <DateTime, int>(it.Key.Floor(TimeSpan.FromMinutes(1)), it.Max(t => t.Item2)))
                          .OrderBy(it => it.Item1)
                          .ToList();
            var vusMaxTime = vusData.Max(it => it.Item1).Subtract(gracefulTimespan);

            // Trim the graceful-stop window from the VU trace as well.
            vusData  = vusData.Where(it => it.Item1 < vusMaxTime).ToList();
            vusTrace = new Scatter()
            {
                x    = vusData.Select(it => it.Item1.AddSeconds(tempStartTime.Second)),
                y    = vusData.Select(it => it.Item2),
                name = "virtual user count",
            };
            // NOTE(review): checkTrace below is built but never returned or assigned
            // to an out parameter — dead code unless something was trimmed here.
            var checksData = lines.Where(it => it["type"] == "Point" &&
                                         it["metric"] == "checks").OrderBy(it =>
                                                                           ParseShortestXsdDateTime(it.Object("data")["time"])).ToList();

            var checkTrace = new Scatter()
            {
                x     = checksData.Select(it => ParseShortestXsdDateTime(it.Object("data")["time"])),
                y     = checksData.Select(it => int.Parse(it.Object("data")["value"]) * 100),
                name  = "check",
                yaxis = "y2"
            };

            return(rpsTrace);
        }