private IEnumerable<PackageVersion> GetPackageVersions(IGrouping<Guid, Entity> entityGrouping, Guid websiteId)
{
    var groupings = entityGrouping
        .GroupBy(e => e.GetAttributeAliasedValue<Guid?>("adx_packageversionid", "version"))
        .Where(e => e.Key != null);

    foreach (var grouping in groupings)
    {
        var entity = grouping.FirstOrDefault();

        if (entity == null)
        {
            continue;
        }

        var versionId = entity.GetAttributeAliasedValue<Guid?>("adx_packageversionid", "version");

        if (versionId == null)
        {
            continue;
        }

        yield return new PackageVersion
        {
            Description = entity.GetAttributeAliasedValue<string>("adx_description", "version"),
            DisplayName = entity.GetAttributeAliasedValue<string>("adx_name", "version"),
            ReleaseDate = entity.GetAttributeAliasedValue<DateTime?>("adx_releasedate", "version").GetValueOrDefault(),
            RequiredInstallerVersion = entity.GetAttributeAliasedValue<string>("adx_requiredinstallerversion", "version"),
            Url = GetPackageVersionUrl(websiteId, versionId.Value),
            Version = entity.GetAttributeAliasedValue<string>("adx_version", "version"),
        };
    }
}
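A hedged usage sketch for the example above. The outer grouping that produces entityGrouping is not shown in the source, so the "adx_packageid"/"package" alias and the caller name below are assumptions for illustration only.

// Hypothetical caller: group fetched gallery entities by package id, then
// project each group into its versions. The alias names and GetAllPackageVersions
// are assumptions, not part of the original code.
IEnumerable<PackageVersion> GetAllPackageVersions(IEnumerable<Entity> entities, Guid websiteId)
{
    return entities
        .GroupBy(e => e.GetAttributeAliasedValue<Guid?>("adx_packageid", "package") ?? Guid.Empty)
        .SelectMany(grouping => GetPackageVersions(grouping, websiteId));
}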
private IEnumerable<IParameter> parametersChangedFromBuildingBlock(IGrouping<PKSimBuildingBlockType, IParameter> parametersByBuildingBlockType, ModelSimulation simulation, PKSimProject project)
{
    var parametersToExport = new List<IParameter>();
    var parametersByBuildingBlock = parametersByBuildingBlockType.GroupBy(x => x.Origin.BuilingBlockId);

    foreach (var parametersByBuildingBlockId in parametersByBuildingBlock)
    {
        var templateBuildingBlock = templateBuildingBlockFor(simulation, parametersByBuildingBlockId.Key, project);
        if (templateBuildingBlock == null)
        {
            parametersToExport.AddRange(parametersByBuildingBlockId);
            continue;
        }

        var templateBuildingBlockParameters = _containerTask.CacheAllChildren<IParameter>(templateBuildingBlock);
        foreach (var parameter in parametersByBuildingBlockId)
        {
            var templateBuildingBlockParameter = templateParameterFor(parameter, templateBuildingBlockParameters);
            if (parameterDiffersFromTemplate(templateBuildingBlockParameter, parameter))
            {
                parametersToExport.Add(parameter);
            }
        }
    }

    return parametersToExport;
}
public List<CustoCatContabil<ItemDespesa>> ObterCustosCat(IGrouping<Empresa, AlocacaoRm> rm, List<RegistroFinanceiro> registros)
{
    var CustoCatContabil = new List<CustoCatContabil<ItemDespesa>>();

    foreach (var rm0 in rm.GroupBy(p => p.RecursoMaterial.CategoriaContabil))
    {
        var itemDespesa = new List<ItemDespesa>();

        foreach (var registro in registros
            .Where(p => p.RecursoMaterial != null && p.RecursoMaterial.CategoriaContabilValor == rm0.First().RecursoMaterial.CategoriaContabilValor).ToList())
        {
            itemDespesa.Add(new ItemDespesa
            {
                NomeItem = registro.NomeItem,
                JustificaItem = rm0.First().Justificativa,
                QtdeItem = registro.QtdItens,
                ValorIndItem = registro.ValorUnitario.ToString(),
                TipoItem = registro.TipoValor,
                ItemLabE = registro.EquiparLabExistente.ToString(),
                ItemLabN = registro.EquiparLabNovo.ToString()
            });
        }

        CustoCatContabil.Add(new CustoCatContabil<ItemDespesa>
        {
            CategoriaContabil = rm0.First().RecursoMaterial.CategoriaContabilValor,
            ItemDespesa = itemDespesa
        });
    }

    return CustoCatContabil;
}
private AnnualPlanKindGameModel GetKindAnnualPlanModel(IGrouping<int, GameDto> gameByKind)
{
    var annualPlanSubKindGameModels = new List<AnnualPlanSubKindGameModel>();

    foreach (var gameBySubKind in gameByKind.GroupBy(x => x.SubKind))
    {
        if (gameBySubKind.Key.HasValue)
        {
            AnnualPlanSubKindGameModel subKindGameModel = GetSubKindAnnualPlanModel(gameByKind.Key, gameBySubKind);
            annualPlanSubKindGameModels.Add(subKindGameModel);
        }
    }

    var annualPlanKindGameModel = new AnnualPlanKindGameModel
    {
        Kind = gameByKind.Key,
        KindName = GamesByType.FirstOrDefault(x => x.Kind == gameByKind.Key).KindName,
        Type = (GameType)GameType,
        AnnualPlanSubKindGameModels = annualPlanSubKindGameModels
    };

    List<int> gameIds = GamesByType.Where(x => x.Kind == gameByKind.Key).Select(x => x.Id).ToList();
    annualPlanKindGameModel = SetPlans(annualPlanKindGameModel, gameIds);

    return annualPlanKindGameModel;
}
private List<Mapping> GetMappings(IGrouping<int, MeterData.EventRow> lineGrouping)
{
    Func<FaultLocationData.FaultSummaryRow, bool> filter = fault => fault.IsSelectedAlgorithm != 0 && fault.IsSuppressed == 0;

    List<FaultTimeline> meterGroupings = lineGrouping
        .GroupBy(evt => evt.MeterID)
        .Select(meterGrouping => new FaultTimeline()
        {
            Meter = m_dbAdapterContainer.GetAdapter<MeterInfoDataContext>().Meters.SingleOrDefault(meter => meter.ID == meterGrouping.Key),
            Faults = meterGrouping.SelectMany(evt => m_dbAdapterContainer.GetAdapter<FaultSummaryTableAdapter>().GetDataBy(evt.ID)).Where(filter).OrderBy(fault => fault.Inception).ToList()
        })
        .Where(meterGrouping => meterGrouping.Faults.Any())
        .ToList();

    return meterGroupings
        .SelectMany(meterGrouping1 => meterGroupings.Select(meterGrouping2 => new { Left = meterGrouping1, Right = meterGrouping2 }))
        .Where(mapping => mapping.Left.Meter.MeterLocationID < mapping.Right.Meter.MeterLocationID)
        .Where(mapping => mapping.Left.Faults.Count == mapping.Right.Faults.Count)
        .SelectMany(mapping => mapping.Left.Faults.Zip(mapping.Right.Faults, (left, right) => new Mapping(left, right)))
        .ToList();
}
private MessageBuilder GetMessageBuilder<T>(MessageTypeHandler<T> messageTypeHandler, IGrouping<ulong, Datom> datoms)
{
    var messageBuilder = new MessageBuilder();
    messageBuilder.Message = messageTypeHandler.Factory(datoms.Key);

    datoms.GroupBy(y => new ParameterWithArrayIndex()
    {
        Parameter = y.Parameter,
        ParameterArrayIndex = y.ParameterArrayIndex
    })
    .Select(grouping => grouping.OrderByDescending(x => x.TransactionId).First())
    .ToList()
    .ForEach(x =>
    {
        var fieldHandler = messageTypeHandler.FieldsByParameterNumber[x.Parameter];
        if (fieldHandler.FieldClass == FieldClass.Simple)
        {
            var simpleFieldHandler = (SimpleField<T>)fieldHandler;
            simpleFieldHandler.Deserialize((T)messageBuilder.Message, x);
        }
        else
        {
            var referenceFieldHandler = (ReferenceField<T>)fieldHandler;
            messageBuilder.References.Add(new ReferenceFromBuilder()
            {
                Parameter = x.Parameter,
                ReferencedTypeId = referenceFieldHandler.ReferencedTypeId,
                ReferencedIdentity = referenceFieldHandler.DeserializeIdentity(x),
                RepeatedFieldIndex = x.ParameterArrayIndex,
            });
        }
    });

    return messageBuilder;
}
private static DeptWiseAttendanceDTO CreateAttendanceDTO(IGrouping<int, ActivityLogDTO> grp, IEnumerable<Employee> allEmployees)
{
    var deptMembers = allEmployees.Where(e => e.Deprtment.Id == grp.First().Department.Id);

    var dto = new DeptWiseAttendanceDTO
    {
        DepartmentName = grp.First().Department.Name,
        Attendance = grp.GroupBy(gd => gd.Employee.Id)
            .Select(empGroup => new AttendanceDTO
            {
                EmployeeName = empGroup.First().Employee.Name,
                EmployeeId = empGroup.First().Employee.Id,
                Attended = true,
                Date = empGroup.First().TimeStamp.ToString("yyyy-MM-dd")
            })
            .ToList()
    };

    var absents = deptMembers.Where(m => !dto.Attendance.Any(a => a.EmployeeId == m.Id));
    var date = dto.Attendance.First().Date;

    dto.Attendance.AddRange(absents.Select(a => new AttendanceDTO
    {
        EmployeeName = a.Name,
        EmployeeId = a.Id,
        Attended = false,
        Date = date
    }));

    return dto;
}
private void AddVersionedOsLegs(BuildMatrixInfo matrix, IGrouping<PlatformId, PlatformInfo> platformGrouping)
{
    var versionGroups = platformGrouping
        .GroupBy(platform => new
        {
            // Assumption: Dockerfile path format <ProductVersion>/<ImageVariant>/<OsVariant>/...
            DotNetVersion = GetDotNetVersionFromPath(platform.DockerfilePathRelativeToManifest),
            OsVariant = platform.Model.OsVersion
        });

    foreach (var versionGrouping in versionGroups)
    {
        IEnumerable<PlatformInfo> subgraphs = versionGrouping
            .GetCompleteSubgraphs(platform => GetPlatformDependencies(platform, platformGrouping))
            .SelectMany(subgraph => subgraph);

        BuildLegInfo leg = new BuildLegInfo()
        {
            Name = $"{versionGrouping.Key.DotNetVersion}-{versionGrouping.Key.OsVariant}"
        };
        matrix.Legs.Add(leg);

        AddCommonVariables(platformGrouping, leg);
        leg.Variables.Add(("dotnetVersion", versionGrouping.Key.DotNetVersion));
        leg.Variables.Add(("osVariant", versionGrouping.Key.OsVariant));

        IEnumerable<string> dockerfilePaths = GetDockerfilePaths(subgraphs)
            .Union(GetCustomLegGroupingDockerfilePaths(subgraphs));
        AddImageBuilderPathsVariable(dockerfilePaths.ToArray(), leg);
    }
}
private void AddVersionedOsLegs(MatrixInfo matrix, IGrouping<PlatformId, PlatformInfo> platformGrouping)
{
    var versionGroups = platformGrouping
        .GroupBy(platform => new
        {
            // Assumption: Dockerfile path format <ProductVersion>/<ImageVariant>/<OsVariant>/...
            DotNetVersion = platform.DockerfilePath.Split(s_pathSeparators)[0],
            OsVariant = platform.DockerfilePath.Split(s_pathSeparators)[2].TrimEnd("-slim")
        });

    foreach (var versionGrouping in versionGroups)
    {
        LegInfo leg = new LegInfo()
        {
            Name = $"{versionGrouping.Key.DotNetVersion}-{versionGrouping.Key.OsVariant}"
        };
        matrix.Legs.Add(leg);

        AddPlatformVariables(platformGrouping, leg);
        leg.Variables.Add(("dotnetVersion", versionGrouping.Key.DotNetVersion));
        leg.Variables.Add(("osVariant", versionGrouping.Key.OsVariant));

        IEnumerable<string> dockerfilePaths = GetDockerfilePaths(versionGrouping)
            .Union(GetCustomLegGroupingDockerfilePaths(versionGrouping));
        AddImageBuilderPathsVariable(dockerfilePaths.ToArray(), leg);
    }
}
private static StringBuilder GenerateNamespace(IGrouping<string, RouteInfo> namespaceGroup, bool isLast)
{
    StringBuilder sb = new StringBuilder();
    sb.AppendLine($"namespace {GetConvertedNamespace(namespaceGroup.Key)}");
    sb.AppendLine("{");

    if (!string.IsNullOrEmpty(_cacheDictionaryClass))
    {
        sb.AppendLine(_cacheDictionaryClass);
        _cacheDictionaryClass = string.Empty;
    }

    var group = namespaceGroup.GroupBy(o => o.ControllerName);
    for (int i = 0; i < group.Count(); i++)
    {
        sb.Append(GenerateClass(group.ElementAt(i), i == (group.Count() - 1)));
    }

    sb.AppendLine("}");
    if (!isLast)
    {
        sb.AppendLine();
    }

    return sb;
}
private void ExecuteYearGroup(
    FixerLogicParameters parameters,
    System.IO.DirectoryInfo outputDirectory,
    IGrouping<int, ImageFixInfo> yearGroup,
    ref int totalCount)
{
    var yearDirectory = System.IO.Path.Combine(outputDirectory.FullName, yearGroup.Key.ToString(CultureInfo.InvariantCulture));
    if (!System.IO.Directory.Exists(yearDirectory))
    {
        OnNotifyMessage($"Creating year directory {yearDirectory}.");
        if (!parameters.IsPreview)
        {
            System.IO.Directory.CreateDirectory(yearDirectory);
        }
    }

    int yearCount = 0;
    var yearGroupByMonth = yearGroup.GroupBy(o => new { Month = o.Month, MonthName = o.MonthName });
    foreach (var monthGroup in yearGroupByMonth)
    {
        ExecuteMonthGroup(parameters, yearDirectory, monthGroup.Key.Month, monthGroup.Key.MonthName, monthGroup, ref yearCount);
    }

    OnNotifyMessage($"Copied {yearCount} files to {yearDirectory}.");
    totalCount += yearCount;
}
private static void AddEntireRecsetGroup(IExecutionEnvironment environment, IGrouping<string, IServiceTestOutput> groupedRecset)
{
    var dataListItems = groupedRecset.GroupBy(item => DataListUtil.ExtractIndexRegionFromRecordset(item.Variable));
    foreach (var dataListItem in dataListItems)
    {
        var recSetsToAssign = new List<IServiceTestOutput>();
        var empty = true;
        foreach (var listItem in dataListItem)
        {
            if (!string.IsNullOrEmpty(listItem.Value))
            {
                empty = false;
            }
            recSetsToAssign.Add(listItem);
        }

        if (!empty)
        {
            foreach (var serviceTestInput in recSetsToAssign)
            {
                environment.Assign(DataListUtil.AddBracketsToValueIfNotExist(serviceTestInput.Variable), serviceTestInput.Value, 0);
            }
        }
    }
}
private static void AddVersionedOsLegs(
    MatrixInfo matrix,
    IGrouping<dynamic, PlatformInfo> platformGrouping,
    bool includeArchitectureName)
{
    var versionGroups = platformGrouping
        .GroupBy(platform => new
        {
            // Assumption: Dockerfile path format <ProductVersion>/<ImageVariant>/<OsVariant>/...
            DotNetVersion = platform.DockerfilePath.Split(PathSeparators)[0],
            OsVariant = platform.DockerfilePath.Split(PathSeparators)[2].TrimEnd("-slim")
        });

    foreach (var versionGrouping in versionGroups)
    {
        string legName = $"{versionGrouping.Key.DotNetVersion}-{versionGrouping.Key.OsVariant}";
        if (includeArchitectureName)
        {
            legName += $"-{GetArchitectureDisplayName(platformGrouping)}";
        }

        LegInfo leg = new LegInfo() { Name = legName };
        matrix.Legs.Add(leg);

        leg.Variables.Add(("dotnetVersion", versionGrouping.Key.DotNetVersion));
        leg.Variables.Add(("osType", platformGrouping.Key.OS.ToString().ToLowerInvariant()));
        leg.Variables.Add(("osVariant", versionGrouping.Key.OsVariant));
        leg.Variables.Add(("osVersion", platformGrouping.Key.OS == OS.Windows ? platformGrouping.Key.OsVersion : "*"));
        leg.Variables.Add(("architecture", platformGrouping.Key.Architecture.ToString().ToLowerInvariant()));
    }
}
private MonthlyReportKindGameModel GetKindMonthlyReportModel(IGrouping<int, GameDto> gamesByKind)
{
    var monthlyReportSubKindGameModels = new List<MonthlyReportSubKindGameModel>();

    foreach (var subKind in gamesByKind.GroupBy(x => x.SubKind))
    {
        if (subKind.Key.HasValue)
        {
            MonthlyReportSubKindGameModel subKindGameModel = GetSubKindMonthlyReportModel(gamesByKind.Key, subKind);
            monthlyReportSubKindGameModels.Add(subKindGameModel);
        }
    }

    var monthlyReportKindGameModel = new MonthlyReportKindGameModel
    {
        Kind = gamesByKind.Key,
        KindName = GamesByType.FirstOrDefault(x => x.Kind == gamesByKind.Key).KindName,
        MonthlyReportSubKindGameModels = monthlyReportSubKindGameModels
    };

    List<int> gameIds = GamesByType.Where(x => x.Kind == gamesByKind.Key).Select(x => x.Id).ToList();
    monthlyReportKindGameModel = SetPlans(monthlyReportKindGameModel, gameIds);

    return monthlyReportKindGameModel;
}
private async Task VerificaTurmasComPendenciaFechamento(IGrouping<long, PeriodoFechamentoBimestre> periodosEncerrando, ModalidadeTipoCalendario modalidade, double percentualFechamentoInsuficiente)
{
    var dre = periodosEncerrando.First().PeriodoFechamento.Ue.Dre;
    var listaUes = new List<(bool notificar, Ue ue, int quantidadeTurmasPendentes)>();

    foreach (var periodoEncerrandoBimestre in periodosEncerrando.GroupBy(c => c.PeriodoEscolar.Bimestre))
    {
        foreach (var periodoEncerrando in periodoEncerrandoBimestre)
        {
            var turmas = await mediator.Send(new ObterTurmasComFechamentoOuConselhoNaoFinalizadosQuery(
                periodoEncerrando.PeriodoFechamento.UeId.Value,
                DateTime.Now.Year,
                periodoEncerrando.PeriodoEscolarId,
                modalidade.ObterModalidadesTurma(),
                DateTime.Now.Semestre()));

            if (turmas != null && turmas.Any())
            {
                listaUes.Add(await VerificaTurmasComPendenciaFechamentoNaUe(turmas, periodoEncerrando.PeriodoFechamento.Ue, percentualFechamentoInsuficiente));
            }
        }

        if (listaUes.Any(c => c.notificar))
        {
            await NotificarUesInsuficientes(listaUes, periodoEncerrandoBimestre.Key, dre, percentualFechamentoInsuficiente);
        }

        if (EhFechamentoFinal(periodoEncerrandoBimestre.Key, modalidade))
        {
            await VerificaPendenciaFechamentoFinal(periodoEncerrandoBimestre, modalidade, dre, percentualFechamentoInsuficiente);
        }
    }
}
async Task GenerateNamespaceDoc(DirectoryInfo directory, IGrouping<string, ApiEntry> ns)
{
    var fileName = Path.Combine(directory.FullName, ToMdFileName(ns.Key));
    using (var file = File.CreateText(fileName))
    {
        var writer = new MarkdownWriter(file);

        await writer.BeginParagraphAsync();
        await writer.WriteAsync($"← {MarkdownInline.HRef("Index", "Api-Index")} ← {MarkdownInline.HRef("Namespace Index", "Namespace-Index")}");
        await writer.EndParagraphAsync();

        await writer.WriteHeaderAsync(1, ns.Key);

        await writer.BeginParagraphAsync();
        foreach (var typeGroup in ns.GroupBy(e => e.DeclaringEntry ?? e).OrderBy(g => g.Key.FullName))
        {
            await writer.WriteLineAsync(MarkdownInline.Strong(MemberGenerator.LinkTo(typeGroup.Key.Name, typeGroup.Key)));
        }
        await writer.EndParagraphAsync();

        await writer.FlushAsync();
    }
}
private static async Task ProcessDuplicatesAsync(IGraphServiceClient graphClient, Calendar calendar, IGrouping<GroupByFields, Event> duplicateGroup)
{
    WriteInfo($"- {duplicateGroup.Key} ({duplicateGroup.Count()} items)");

    // Check whether more than one event has the same ID.
    var idGroups = duplicateGroup.GroupBy(e => e.Id).ToList();
    if (idGroups.Any(g => g.Count() > 1))
    {
        WriteInfo(" The impossible seems to have happened: Multiple events have the same id. Here they are:");
        foreach (var idGroup in idGroups.Where(g => g.Count() > 1))
        {
            WriteInfo($" - {idGroup.Count()} events with ID {idGroup.Key}");
        }
    }

    // Double-check for existence so that we don't use "phantom" events.
    var events = await GetNonPhantomsAsync(graphClient, calendar, idGroups.Select(g => g.Key).ToList());
    WriteInfo($" Number of unique \"non-phantom\" IDs: {events.Count}");

    if (events.Count > 1 && _options.Fix)
    {
        if (calendar.CanEdit.HasValue && calendar.CanEdit.Value)
        {
            await RemoveDuplicatesAsync(graphClient, calendar, events);
        }
        else
        {
            WriteInfo(" Calendar is not editable so we can't fix.");
        }
    }
}
private void GenerateNamespace(IGrouping<string, MetaMethodInfo> namespacePair)
{
    var ns = namespacePair.Key;
    sb.AppendFormat("\t\t<namespace name=\"{0}\">\n", ns);

    // Group by class
    foreach (var classPair in namespacePair.GroupBy(method =>
    {
        var cls = method.method.ReflectedType;
        if (!cls.IsGenericType)
        {
            return cls;
        }
        else
        {
            if (cls.IsGenericTypeDefinition)
            {
                return cls;
            }
            else
            {
                return cls.GetGenericTypeDefinition();
            }
        }
    }))
    {
        GenerateClass(classPair);
    }

    sb.AppendFormat("\t\t</namespace>\n");
}
private List<Mapping> GetMappings(AdoDataConnection connection, IGrouping<int, Event> lineGrouping)
{
    TableOperations<Meter> meterTable = new TableOperations<Meter>(connection);
    TableOperations<FaultGroup> faultGroupTable = new TableOperations<FaultGroup>(connection);
    TableOperations<FaultSummary> faultSummaryTable = new TableOperations<FaultSummary>(connection);

    Func<FaultSummary, bool> filter = fault =>
    {
        if (!Convert.ToBoolean(fault.IsSelectedAlgorithm))
        {
            return false;
        }

        List<FaultGroup> faultGroups = faultGroupTable
            .QueryRecordsWhere("EventID = {0}", fault.EventID)
            .ToList();

        foreach (FaultGroup faultGroup in faultGroups)
        {
            bool? faultDetectionResult = faultGroup.FaultDetectionLogicResult;

            // Fault validation (based on line length) doesn't apply because the fault may have been
            // invalidated due to a high impedance so we only check whether it has been suppressed
            bool faultValidationResult = !fault.IsSuppressed;

            if (faultDetectionResult == false || (m_faultLocationSettings.UseDefaultFaultDetectionLogic && !faultValidationResult))
            {
                return false;
            }
        }

        return true;
    };

    List<FaultTimeline> meterGroupings = lineGrouping
        .GroupBy(evt => evt.MeterID)
        .Select(meterGrouping => new FaultTimeline()
        {
            Meter = meterTable.QueryRecordWhere("ID = {0}", meterGrouping.Key),
            Faults = meterGrouping
                .SelectMany(evt => faultSummaryTable.QueryRecordsWhere("EventID = {0}", evt.ID))
                .Where(filter)
                .OrderBy(fault => fault.Inception)
                .ToList()
        })
        .Where(meterGrouping => meterGrouping.Faults.Any())
        .ToList();

    return meterGroupings
        .SelectMany(meterGrouping1 => meterGroupings.Select(meterGrouping2 => new { Left = meterGrouping1, Right = meterGrouping2 }))
        .Where(mapping => mapping.Left.Meter.MeterLocationID < mapping.Right.Meter.MeterLocationID)
        .Where(mapping => mapping.Left.Faults.Count == mapping.Right.Faults.Count)
        .SelectMany(mapping => mapping.Left.Faults.Zip(mapping.Right.Faults, (left, right) => new Mapping(left, right)))
        .ToList();
}
internal NtHandleObjectGroup(IGrouping<ulong, NtHandle> group)
{
    Object = group.Key;
    Count = group.Count();
    Handles = group;
    ProcessCount = group.GroupBy(h => h.ProcessId).Count();
    _get_values = new Lazy<Tuple<string, SecurityDescriptor>>(GetValues);
}
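A minimal sketch of how such group objects could be built from a flat handle list. The handle source and the Object address property used as the grouping key are assumptions for illustration, not taken from the example above.

// Hypothetical caller: group all handles by the kernel object address they
// point to, then wrap each group. The Object property as the ulong key is
// an assumption for illustration only.
static IEnumerable<NtHandleObjectGroup> GroupHandlesByObject(IEnumerable<NtHandle> handles)
{
    return handles
        .GroupBy(h => h.Object)
        .Select(g => new NtHandleObjectGroup(g));
}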
private static IEnumerable<IAutoCompleteListItem> parseClientGroup(IGrouping<ulong, Toggl.TogglAutocompleteView> c)
{
    var projectItems = c.GroupBy(p => p.ProjectID).Select(parseProjectGroup);

    if (c.Key == 0)
        return projectItems;

    var clientName = c.First().ClientLabel;
    return new ClientCategory(clientName, projectItems.ToList()).Yield<IAutoCompleteListItem>();
}
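A hedged sketch of a possible call site for the example above. The ClientID field used as the ulong group key and the caller name are assumptions for illustration only.

// Hypothetical caller: group autocomplete entries by client, then flatten
// each client group into list items. ClientID as the grouping key is an
// assumption, not confirmed by the original code.
static IEnumerable<IAutoCompleteListItem> buildClientCategories(IEnumerable<Toggl.TogglAutocompleteView> entries)
{
    return entries
        .GroupBy(e => e.ClientID)
        .SelectMany(parseClientGroup);
}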
private TestAssembly CreateAssembly(IGrouping<string, TestResultInfo> resultsByAssembly)
{
    return new()
    {
        Name = resultsByAssembly.Key,
        Fixtures = resultsByAssembly.GroupBy(a => a.Type).Select(this.CreateFixture)
    };
}
/// <summary>
/// Given a group of votes (grouped by task), create and return
/// a list of VoteNodes that collapse together votes that are
/// sub-votes of each other.
/// </summary>
/// <param name="taskGroup">A set of votes with the same task value.</param>
/// <returns>Returns a list of VoteNodes that collapse similar votes.</returns>
public static IEnumerable<VoteNode> GetVoteNodes(IGrouping<string, KeyValuePair<string, HashSet<string>>> taskGroup)
{
    var groupByFirstLine = taskGroup.GroupBy(v => v.Key.GetFirstLine(), Agnostic.StringComparer);

    List<VoteNode> nodeList = new List<VoteNode>();
    VoteNode parent;

    foreach (var voteGroup in groupByFirstLine)
    {
        parent = null;

        if (voteGroup.Count() == 1)
        {
            string planname = VoteString.GetPlanName(voteGroup.Key);

            if (planname != null && VoteCounter.Instance.HasPlan(planname))
            {
                var vote = voteGroup.First();
                parent = new VoteNode(vote.Key, vote.Value);
                nodeList.Add(parent);
                continue;
            }
        }

        foreach (var vote in voteGroup)
        {
            var lines = vote.Key.GetStringLines();

            if (parent == null)
            {
                var voters = lines.Count == 1 ? vote.Value : null;
                parent = new VoteNode(lines[0], voters);
            }

            if (lines.Count == 1)
            {
                parent.AddVoters(vote.Value);
            }
            else if (lines.Count == 2 && !string.IsNullOrEmpty(VoteString.GetVotePrefix(lines[1])))
            {
                parent.AddChild(lines[1], vote.Value);
            }
            else
            {
                parent.AddChild(vote.Key, vote.Value);
            }
        }

        if (parent != null)
        {
            nodeList.Add(parent);
        }
    }

    return nodeList.OrderByDescending(v => v.VoterCount);
}
private static ProductionRecap_LineProduct_ByLine_ByType CreateByTypeGroup(IGrouping<string, ProductionRecapLot> byType)
{
    var typeGroup = new ProductionRecap_LineProduct_ByLine_ByType
    {
        Type = byType.Key,
        ItemsByProduct = byType.GroupBy(g => g.ChileProduct.ProductName)
            .Select(g => CreateWeightItem(g))
            .OrderBy(i => i.Name)
            .ToList()
    };
    return typeGroup;
}
public ProductionRecap_TestItem(IGrouping<string, ProductionRecapLot> group, Func<string, string> processName)
{
    Name = processName == null ? group.Key : processName(group.Key);
    var results = group.GroupBy(g => g.TestResult).ToDictionary(g => g.Key, g => g.ToList());

    Passed = WeightOf(TestResults.Pass, results);
    Failed = WeightOf(TestResults.Fail, results);
    NonCntrl = WeightOf(TestResults.NonCntrl, results);
    InProc = WeightOf(TestResults.InProc, results);
}
private List<Mapping> GetMappings(IGrouping<int, MeterData.EventRow> lineGrouping)
{
    Func<FaultSummaryRow, bool> filter = fault =>
    {
        FaultGroupTableAdapter faultGroupAdapter;

        if (!Convert.ToBoolean(fault.IsSelectedAlgorithm))
        {
            return false;
        }

        faultGroupAdapter = m_dbAdapterContainer.GetAdapter<FaultGroupTableAdapter>();

        foreach (FaultGroupRow faultGroup in faultGroupAdapter.GetDataByEvent(fault.EventID))
        {
            bool? faultDetectionResult = !faultGroup.IsFaultDetectionLogicResultNull()
                ? Convert.ToBoolean(faultGroup.FaultDetectionLogicResult)
                : (bool?)null;

            // Fault validation (based on line length) doesn't apply because the fault may have been
            // invalidated due to a high impedance so we only check whether it has been suppressed
            bool faultValidationResult = !Convert.ToBoolean(fault.IsSuppressed);

            if (faultDetectionResult == false || (m_faultLocationSettings.UseDefaultFaultDetectionLogic && !faultValidationResult))
            {
                return false;
            }
        }

        return true;
    };

    List<FaultTimeline> meterGroupings = lineGrouping
        .GroupBy(evt => evt.MeterID)
        .Select(meterGrouping => new FaultTimeline()
        {
            Meter = m_dbAdapterContainer.GetAdapter<MeterInfoDataContext>().Meters.SingleOrDefault(meter => meter.ID == meterGrouping.Key),
            Faults = meterGrouping.SelectMany(evt => m_dbAdapterContainer.GetAdapter<FaultSummaryTableAdapter>().GetDataBy(evt.ID)).Where(filter).OrderBy(fault => fault.Inception).ToList()
        })
        .Where(meterGrouping => meterGrouping.Faults.Any())
        .ToList();

    return meterGroupings
        .SelectMany(meterGrouping1 => meterGroupings.Select(meterGrouping2 => new { Left = meterGrouping1, Right = meterGrouping2 }))
        .Where(mapping => mapping.Left.Meter.MeterLocationID < mapping.Right.Meter.MeterLocationID)
        .Where(mapping => mapping.Left.Faults.Count == mapping.Right.Faults.Count)
        .SelectMany(mapping => mapping.Left.Faults.Zip(mapping.Right.Faults, (left, right) => new Mapping(left, right)))
        .ToList();
}
private Dictionary<ulong, MessageBuilder> GetMessageBuildersForType<T>(IGrouping<ushort, Datom> datoms)
{
    MessageTypeHandler<T> messageTypeHandler = (MessageTypeHandler<T>)_messageTypeHandlerByTypeId[datoms.Key];

    return datoms.GroupBy(x => x.Identity)
        .Select(groupedByIdentity => new KeyValuePair<ulong, MessageBuilder>(
            groupedByIdentity.Key,
            GetMessageBuilder(messageTypeHandler, groupedByIdentity)))
        .ToDictionary(x => x.Key, x => x.Value);
}
private IEnumerable<ComponenteCurricularPlanejamentoDiarioDto> AgrupaAulasComponentes(IGrouping<int?, AulaPlanoAulaDto> aulasBimestre, bool exibirDetalhamento)
{
    foreach (var agrupamentoComponente in aulasBimestre.GroupBy(c => c.ComponenteCurricular))
    {
        var componente = new ComponenteCurricularPlanejamentoDiarioDto();
        componente.Nome = agrupamentoComponente.Key;
        componente.PlanejamentoDiario = ObterDadosAulasComponente(agrupamentoComponente, exibirDetalhamento);

        yield return componente;
    }
}
private static IEnumerable<WorkItem> SumUpDurationsPerTicket(IGrouping<DateTime, WorkItem> workItems)
{
    var perTicketNumber = workItems.GroupBy(wi => wi.TicketNumber);

    return perTicketNumber.Select(itemsPerTicket => new WorkItem(itemsPerTicket.Key)
    {
        Comment = string.Join(" | ", itemsPerTicket.Select(item => item.Comment).Distinct()),
        Started = workItems.Key,
        Duration = SumOf(itemsPerTicket.Select(item => item.Duration).ToList())
    }).ToList();
}
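A minimal usage sketch for the example above. The source does not show how the DateTime group key is produced; grouping by Started.Date and the caller name are assumptions for illustration only.

// Hypothetical caller: group raw work items by the day they were started,
// then merge each day's entries per ticket. Started.Date as the grouping key
// is an assumption, not confirmed by the original code.
static IEnumerable<WorkItem> SumUpDurationsPerDayAndTicket(IEnumerable<WorkItem> items)
{
    return items
        .GroupBy(item => item.Started.Date)
        .SelectMany(SumUpDurationsPerTicket);
}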
public void Calc(IGrouping<string, SpecItem> itemGroup, SpecTable specTable)
{
    // itemGroup holds the items of one group.
    // Group them further by the key property.
    var uniqRecs = itemGroup.GroupBy(m => m.Key).OrderBy(m => m.Key, new AcadLib.Comparers.AlphanumComparator());
    foreach (var urec in uniqRecs)
    {
        SpecRecord rec = new SpecRecord(urec.Key, urec.ToList(), specTable);
        Records.Add(rec);
    }
}
private static void WriteGroup(IGrouping<int, Packet> groups)
{
    var groupDir = groups.GroupBy(p => p.Direction);
    foreach (var group in groupDir)
    {
        var fileName = Folder + "/" + Opcodes.GetOpcodeName(groups.Key, group.Key) + ".pkt";

        using (var fileStream = new FileStream(fileName, FileMode.Append, FileAccess.Write))
        using (var writer = new BinaryWriter(fileStream, _encoding))
        {
            foreach (var packet in group)
            {
                writer.Write((ushort)packet.Opcode);
                writer.Write((int)packet.Length);
                writer.Write((byte)packet.Direction);
                writer.Write((ulong)Utilities.GetUnixTimeFromDateTime(packet.Time));
                writer.Write(packet.GetStream(0));
            }
        }
    }
}
private static void WriteGroup(IGrouping<int, Packet> groups) { var groupDir = groups.GroupBy(p => p.Direction); foreach (var group in groupDir) { var fileName = "Fusion.pkt"; using (var fileStream = new FileStream(fileName, FileMode.Append, FileAccess.Write)) using (var writer = new BinaryWriter(fileStream, _encoding)) foreach (var packet in group) { writer.Write((ushort)packet.Opcode); writer.Write((int)packet.Length); writer.Write((byte)packet.Direction); writer.Write((ulong)Utilities.GetUnixTimeFromDateTime(packet.Time)); writer.Write(packet.GetStream(0)); // TODO: Add ConnIdx in a backwards compatible way } } }
private ApiDeclaration CreateDeclaration(IGrouping<string, ApiDescription> apiDescriptionGroup)
{
    var modelSpecRegistrar = new ModelSpecRegistrar();

    // Group further by relative path - each group corresponds to an ApiSpec
    var apiSpecs = apiDescriptionGroup
        .GroupBy(apiDesc => apiDesc.RelativePath)
        .Select(apiDescGrp => CreateApiSpec(apiDescGrp, modelSpecRegistrar))
        .ToList();

    return new ApiDeclaration
    {
        ApiVersion = "1.0",
        SwaggerVersion = SwaggerVersion,
        BasePath = _basePathResolver().TrimEnd('/'),
        ResourcePath = apiDescriptionGroup.Key,
        Apis = apiSpecs,
        Models = modelSpecRegistrar.ToDictionary()
    };
}
private ApiDeclaration DescriptionGroupToApiDeclaration(IGrouping<string, ApiDescription> descriptionGroup)
{
    var modelSpecsBuilder = new ModelSpecsBuilder();

    // Group further by relative path - each group corresponds to an ApiSpec
    var apiSpecs = descriptionGroup
        .GroupBy(ad => ad.RelativePath)
        .Select(dg => DescriptionGroupToApiSpec(dg, modelSpecsBuilder))
        .ToList();

    return new ApiDeclaration
    {
        apiVersion = "1.0",
        swaggerVersion = SwaggerVersion,
        basePath = _basePathAccessor(),
        resourcePath = descriptionGroup.Key,
        apis = apiSpecs,
        models = modelSpecsBuilder.Build()
    };
}
private ApiDeclaration CreateDeclaration(IGrouping<string, ApiDescription> apiDescriptionGroup)
{
    var complexModels = new Dictionary<string, DataType>();

    // Group further by relative path - each group corresponds to an Api
    var apis = apiDescriptionGroup
        .GroupBy(apiDesc => apiDesc.RelativePathSansQueryString())
        .Select(apiDescGrp => CreateApi(apiDescGrp, complexModels))
        .OrderBy(api => api.Path)
        .ToList();

    return new ApiDeclaration
    {
        SwaggerVersion = SwaggerVersion,
        ApiVersion = _apiVersion,
        BasePath = _basePath,
        ResourcePath = apiDescriptionGroup.Key,
        Apis = apis,
        Models = complexModels
    };
}
/// <summary>
/// Validates a group of operations with the same context Uri.
/// </summary>
/// <param name="operations">Operations to validate.</param>
private void ValidateOperationMetadataGroup(IGrouping<string, ODataOperation> operations)
{
    Debug.Assert(operations != null, "operations must not be null.");
    Debug.Assert(operations.Any(), "operations.Any()");
    Debug.Assert(operations.All(o => this.GetOperationMetadataString(o) == operations.Key), "The operations should be grouped by their metadata.");

    if (operations.Count() > 1 && operations.Any(o => o.Target == null))
    {
        throw new ODataException(OData.Core.Strings.ODataJsonLightEntryAndFeedSerializer_ActionsAndFunctionsGroupMustSpecifyTarget(operations.Key));
    }

    foreach (IGrouping<string, ODataOperation> operationsByTarget in operations.GroupBy(this.GetOperationTargetUriString))
    {
        if (operationsByTarget.Count() > 1)
        {
            throw new ODataException(OData.Core.Strings.ODataJsonLightEntryAndFeedSerializer_ActionsAndFunctionsGroupMustNotHaveDuplicateTarget(operations.Key, operationsByTarget.Key));
        }
    }
}
private static bool RunTestClass(IMessageSink messageSink, IGrouping<ITypeInfo, XunitTestCase> group, RunSummary classSummary)
{
    bool cancelled = false;
    var aggregator = new ExceptionAggregator();

    Type testClassType = ((IReflectionTypeInfo)group.Key).Type;
    Dictionary<Type, object> fixtureMappings = new Dictionary<Type, object>();
    List<object> constructorArguments = new List<object>();

    // TODO: Read class fixtures from test collection
    foreach (var iface in testClassType.GetInterfaces().Where(i => i.IsGenericType && i.GetGenericTypeDefinition() == typeof(IClassFixture<>)))
    {
        Type fixtureType = iface.GetGenericArguments().Single();
        object fixture = null;
        aggregator.Run(() => fixture = Activator.CreateInstance(fixtureType));
        fixtureMappings.Add(fixtureType, fixture);
    }

    var ctors = testClassType.GetConstructors();
    if (ctors.Length != 1)
    {
        aggregator.Add(new TestClassException("A test class may only define a single public constructor."));
    }
    else
    {
        var ctor = ctors.Single();
        List<string> unusedArguments = new List<string>();

        foreach (var paramInfo in ctor.GetParameters())
        {
            object fixture;

            if (fixtureMappings.TryGetValue(paramInfo.ParameterType, out fixture))
                constructorArguments.Add(fixture);
            else
                unusedArguments.Add(paramInfo.ParameterType.Name + " " + paramInfo.Name);
        }

        if (unusedArguments.Count > 0)
            aggregator.Add(new TestClassException("The following constructor arguments did not have matching fixture data: " + String.Join(", ", unusedArguments)));
    }

    var methodGroups = group.GroupBy(tc => tc.Method);

    foreach (var method in methodGroups)
    {
        if (!messageSink.OnMessage(new TestMethodStarting { ClassName = group.Key.Name, MethodName = method.Key.Name }))
            cancelled = true;
        else
            cancelled = RunTestMethod(messageSink, constructorArguments.ToArray(), method, classSummary, aggregator);

        if (!messageSink.OnMessage(new TestMethodFinished { ClassName = group.Key.Name, MethodName = method.Key.Name }))
            cancelled = true;

        if (cancelled)
            break;
    }

    foreach (var fixture in fixtureMappings.Values.OfType<IDisposable>())
    {
        try
        {
            fixture.Dispose();
        }
        catch (Exception ex)
        {
            if (!messageSink.OnMessage(new ErrorMessage(ex.Unwrap())))
                cancelled = true;
        }
    }

    return cancelled;
}
private ApiDeclaration GenerateDeclaration(IGrouping<string, ApiDescription> apiDescriptionGroup)
{
    var modelSpecMap = new ModelSpecMap(_customTypeMappings);

    // Group further by relative path - each group corresponds to an ApiSpec
    var apiSpecs = apiDescriptionGroup
        .GroupBy(apiDesc => apiDesc.RelativePath)
        .Select(apiDescGrp => GenerateApiSpec(apiDescGrp, modelSpecMap))
        .ToList();

    var complexModelSpecs = modelSpecMap.GetAll()
        .Where(modelSpec => modelSpec.Type == "object")
        .ToDictionary(modelSpec => modelSpec.Id, modelSpec => modelSpec);

    return new ApiDeclaration
    {
        ApiVersion = "1.0",
        SwaggerVersion = SwaggerVersion,
        BasePath = _basePathResolver().TrimEnd('/'),
        ResourcePath = apiDescriptionGroup.Key,
        Apis = apiSpecs,
        Models = complexModelSpecs
    };
}
/// <summary>
/// Assigns the severity and environment severity list.
/// </summary>
/// <param name="environment">The environment.</param>
/// <param name="bugItem">The bug item.</param>
private static void AssignSeverityAndEnvironmentSeverityList(IGrouping<byte?, ProjectPortalBugsBySeverityAndEnvironment> environment, EnvironmentBugList bugItem)
{
    var severityGroupedBugList = environment.GroupBy(bugType => bugType.Severity);

    foreach (var severity in severityGroupedBugList)
    {
        SeverityBug severityItem = new SeverityBug();
        severityItem.WorkItemCount = severity.Sum(bugType => bugType.WorkItemCount) ?? 0;

        switch (severity.Key)
        {
            case SeverityCritical:
                bugItem.CriticalBug = severityItem;
                break;
            case SeverityHigh:
                bugItem.HighBug = severityItem;
                break;
            case SeverityMedium:
                bugItem.MediumBug = severityItem;
                break;
            case SeverityLow:
                bugItem.LowBug = severityItem;
                break;
        }
    }
}
private void ProcessBout(IGrouping<int, Jam> boutJamSet, IGrouping<int, PenaltyGroup> penaltyGroups, Dictionary<int, JamTimeEstimate> estimateMap)
{
    // first, we do the approximations
    var periods = boutJamSet.GroupBy(j => j.IsFirstHalf);

    foreach (IEnumerable<Jam> jams in periods)
    {
        var jamIDs = jams.Select(j => j.ID);
        int totalSeconds = 1800;

        // we'll assume four clock stoppages per half, then add time in as necessary
        totalSeconds -= 30 * (jams.Count() - 5);

        int minTime = 0;
        int maxTime = 0;

        foreach (int jamID in jamIDs)
        {
            minTime += estimateMap[jamID].Minimum;
            maxTime += estimateMap[jamID].Maximum;
        }

        totalSeconds -= minTime;

        if (totalSeconds < 0)
        {
            // just assume the min time for everything
            foreach (Jam jam in jams)
            {
                var jamLimit = estimateMap[jam.ID];
                jamLimit.Estimate = jamLimit.Minimum;
            }
        }
        else
        {
            int difference = maxTime - minTime;
            double ratio = ((double)totalSeconds) / difference;

            if (ratio > 1.0)
            {
                Console.WriteLine("Bout: " + jams.First().BoutID + " has a period that the maximums don't fill");
                ratio = 1.0;
            }

            foreach (Jam jam in jams)
            {
                var jamLimit = estimateMap[jam.ID];
                jamLimit.Estimate = jamLimit.Minimum + (int)((jamLimit.Maximum - jamLimit.Minimum) * ratio);
            }
        }
    }
}
/// <summary>
/// @@@ to develop
/// We have now established all necessary curve requests.
/// This function removes duplicates to minimize requests to external sources;
/// the check against existing data is done afterwards in a different function.
/// Removing duplicates is quite important as it reduces computational time considerably.
///
/// Logic:
/// Imagine the space "forward dates x market dates" for a specific granularity.
/// Add the 1st ReportItem, compare the 2nd against it, if overlapping adjust the 1st, otherwise add the 2nd and so on.
/// Complexity is high, only do by development and forward for now @@@
/// </summary>
private static void OptimizeCurveReportItems(IGrouping<string, CurveReportItem> productGroup)
{
    List<CurveReportItem> alreadyOptimizedItems = new List<CurveReportItem>();
    CurveReportItem runner;
    CurveReportItem residual;

    IEnumerable<IGrouping<int, CurveReportItem>> tenorGroups = productGroup.GroupBy(k => k.ParentCurveReport.TenorType);

    foreach (var group in tenorGroups)
    {
        Stack<CurveReportItem> stackToOptimize = new Stack<CurveReportItem>(group.OrderBy(k => k.DataPoints));
        alreadyOptimizedItems.Add(stackToOptimize.Pop()); // add first item [@@@ check: how was it ordered, smallest or largest on top?]

        foreach (var item in stackToOptimize.ToList()) // .ToList as we would otherwise modify the stack while using it
        {
            runner = stackToOptimize.Pop();
        }
    }
}
/// <summary>
/// Returns the orders that match the given invoice type.
/// </summary>
/// <param name="type">Invoice type</param>
/// <param name="groupedOrder">Order group</param>
/// <returns></returns>
private IEnumerable<IGrouping<string, VirtualOrder>> getCurrentInvoiceType(InvoiceTypes type, IGrouping<string, VirtualOrder> groupedOrder)
{
    IEnumerable<IGrouping<string, VirtualOrder>> _invoiceTypeGroup = null;

    if ((type != null) && type.InvoiceTypeName == "Sammelrechnung")
    {
        _invoiceTypeGroup = groupedOrder.GroupBy(q => q.LocationName.ToString());
    }

    if ((type != null) && type.InvoiceTypeName == "Einzelrechnung")
    {
        _invoiceTypeGroup = groupedOrder.GroupBy(q => q.OrderNumber.ToString());
    }

    if ((type != null) && type.InvoiceTypeName == "Wochenrechnung")
    {
        _invoiceTypeGroup = groupedOrder.GroupBy(q => Math.Floor((decimal)q.ExecutionDate.Value.DayOfYear / 7).ToString());
    }

    if ((type != null) && type.InvoiceTypeName == "Monatsrechnung")
    {
        _invoiceTypeGroup = groupedOrder.GroupBy(q => q.ExecutionDate.Value.Month.ToString());
    }

    return _invoiceTypeGroup;
}
/// <summary>
/// Assigns the environment and type list.
/// </summary>
/// <param name="environment">The environment.</param>
/// <param name="bugItem">The bug item.</param>
private static void AssignEnvironmentAndTypeList(IGrouping<byte?, ProjectPortalBugsByEnvironmentAndBugType> environment, EnvironmentBugList bugItem)
{
    var severityGroupedBugList = environment.GroupBy(bugType => bugType.BugType);

    foreach (var severity in severityGroupedBugList)
    {
        SeverityBug severityItem = new SeverityBug();
        severityItem.WorkItemCount = severity.Sum(bugType => bugType.WorkItemCount) ?? 0;

        switch (severity.Key)
        {
            case CodeDefect:
                bugItem.CodeDefect = severityItem;
                break;
            case SpecIssue:
                bugItem.SpecIssue = severityItem;
                break;
            case Suggestion:
                bugItem.Suggestion = severityItem;
                break;
        }
    }
}
/// <summary>
/// Assigns the severity and bug type severity list.
/// </summary>
/// <param name="bugType">Type of the bug.</param>
/// <param name="bugItem">The bug item.</param>
private static void AssignSeverityAndBugTypeSeverityList(IGrouping<string, ProjectPortalBugsBySeverityAndBugType> bugType, EnvironmentBugList bugItem)
{
    var severityGroupedBugList = bugType.GroupBy(bugTypeItem => bugTypeItem.Severity);

    foreach (var severityItem in severityGroupedBugList)
    {
        var severityBug = new SeverityBug();
        severityBug.WorkItemCount = severityItem.Sum(bugTypeItem => bugTypeItem.WorkItemCount) ?? 0;

        switch (severityItem.Key)
        {
            case SeverityCritical:
                bugItem.CriticalBug = severityBug;
                break;
            case SeverityHigh:
                bugItem.HighBug = severityBug;
                break;
            case SeverityMedium:
                bugItem.MediumBug = severityBug;
                break;
            case SeverityLow:
                bugItem.LowBug = severityBug;
                break;
        }
    }
}