public void ExecuteTasks(HashSet<EndRequestTask> tasks)
{
    var tasksGroupedByType = tasks.GroupBy(task => task.GetType())
        .ToDictionary(grouping => grouping.Key, grouping => grouping.ToHashSet());

    foreach (var type in tasksGroupedByType.Keys.OrderByDescending(GetExecutionPriority))
    {
        if (OnRequestExecutionTypes.ContainsKey(type) && OnRequestExecutionTypes[type] != null)
        {
            var requestBase = _kernel.Get(OnRequestExecutionTypes[type]) as ExecuteEndRequestBase;
            if (requestBase != null)
            {
                var data = tasksGroupedByType[type].Select(task => task.BaseData).ToHashSet();
                requestBase.Execute(data);
                continue;
            }
        }

        CurrentRequestData.ErrorSignal.Raise(
            new Exception(
                string.Format(
                    "Could not process tasks of type {0}. Please create a valid executor for the type",
                    type.FullName)));
    }
}
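A minimal, self-contained distillation of the pattern above (group a HashSet by runtime type, then dispatch per group); the TaskBase/EmailTask/LogTask types here are hypothetical stand-ins, not the EndRequestTask hierarchy from the snippet:

using System;
using System.Collections.Generic;
using System.Linq;

// Hypothetical task types for illustration only.
abstract class TaskBase { }
sealed class EmailTask : TaskBase { }
sealed class LogTask : TaskBase { }

static class Program
{
    static void Main()
    {
        var tasks = new HashSet<TaskBase> { new EmailTask(), new EmailTask(), new LogTask() };

        // HashSet<T> implements IEnumerable<T>, so GroupBy applies directly;
        // each group's Key is the runtime type, mirroring the dispatch loop above.
        foreach (var group in tasks.GroupBy(t => t.GetType()))
            Console.WriteLine("{0}: {1} task(s)", group.Key.Name, group.Count());
    }
}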
public static void SomeFunction()
{
    Dictionary<int, int> dict = new Dictionary<int, int>();
    dict.Add(4, 3);
    Console.WriteLine(dict[4]);
    Console.WriteLine(dict.ContainsKey(8));
    dict.Remove(4);

    foreach (int key in dict.Keys)
        Console.WriteLine(key);
    foreach (int val in dict.Values)
        Console.WriteLine(val);
    foreach (var kv in dict)
        Console.WriteLine(kv.Key + " " + kv.Value);

    var dict2 = dict.ToDictionary(o => o.Key, o => o.Value);
    var vals = dict.Values;

    HashSet<int> hash = new HashSet<int>();
    hash.Add(999);
    Console.WriteLine(hash.Contains(999));
    hash.Remove(999);
    Console.WriteLine(hash.Contains(999));

    foreach (int hashItem in hash)
        Console.WriteLine(hashItem);

    var z = hash.Select(o => 3).ToArray();
    // DefaultIfEmpty guards Min() against the now-empty set, which would otherwise throw.
    var g = hash.GroupBy(o => o).Select(o => o.Count()).DefaultIfEmpty(0).Min();
}
public Item GetVinhoRecomendado(int idCliente)
{
    var vinhosComprados = new HashSet<Item>();
    _compras.Where(compra => compra.Cliente.Id == idCliente).ToList()
        .ForEach(compra => vinhosComprados.UnionWith(compra.Itens));

    var vinhoPreferido = vinhosComprados.GroupBy(item => new { item.Variedade, item.Categoria })
        .OrderByDescending(item => item.Count())
        .Select(item => item.Key)
        .FirstOrDefault();

    var vinhosNuncaComprados = _itens.Where(item => !vinhosComprados.Contains(item))
        .OrderByDescending(item => item.Preco);

    var vinhoRecomendado = vinhosNuncaComprados
        .Where(item => item.Variedade == vinhoPreferido?.Variedade && item.Categoria == vinhoPreferido?.Categoria)
        .FirstOrDefault();

    if (vinhoRecomendado == null)
    {
        vinhoRecomendado = vinhosNuncaComprados
            .Where(item => item.Variedade == vinhoPreferido?.Variedade || item.Categoria == vinhoPreferido?.Categoria)
            .FirstOrDefault();
    }

    return vinhoRecomendado;
}
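The preference lookup above is the "most frequent key" idiom: group, order the groups by size, take the first key. A standalone sketch of that idiom with hypothetical purchase data:

using System;
using System.Collections.Generic;
using System.Linq;

static class Program
{
    static void Main()
    {
        // Hypothetical purchase history.
        var purchases = new List<(string Variedade, string Categoria)>
        {
            ("Malbec", "Tinto"), ("Malbec", "Tinto"), ("Chardonnay", "Branco")
        };

        // Most frequent (variety, category) pair: group, order groups by size, take the first key.
        var favorite = purchases.GroupBy(p => p)
                                .OrderByDescending(g => g.Count())
                                .Select(g => g.Key)
                                .FirstOrDefault();

        Console.WriteLine(favorite); // (Malbec, Tinto)
    }
}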
private async Task DeleteInDestination(HashSet<Tuple<string, string, string>> src, HashSet<Tuple<string, string, string>> dst)
{
    dst.ExceptWith(src);
    int n = 0;

    // Group by table + partition key.
    foreach (var batch1 in dst.GroupBy(x => x.Item1 + x.Item2))
    {
        CloudTable dstTable = _dstClient.GetTableReference(batch1.First().Item1);
        if (_token.IsCancellationRequested)
            return;

        foreach (var batch2 in batch1.Batch(100))
        {
            if (_token.IsCancellationRequested)
                return;

            var op = new TableBatchOperation();
            foreach (var tuple in batch2)
            {
                op.Delete(new TableEntity(tuple.Item2, tuple.Item3) { ETag = "*" });
            }

            await dstTable.ExecuteBatchAsync(op, _token);
            n += Math.Min(op.Count, 100);
            Console.WriteLine("deleted {0} rows", n);
        }
    }
}
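The Batch(100) call above appears to be an extension method (MoreLINQ ships one with this shape). A dependency-free sketch of the same chunking, using an index-based GroupBy and the Azure Table limit of 100 operations per batch:

using System;
using System.Collections.Generic;
using System.Linq;

static class Program
{
    static void Main()
    {
        var keys = new HashSet<int>(Enumerable.Range(0, 250));

        // Chunk into batches of at most 100 items (the Azure Table batch limit)
        // with an index-based GroupBy instead of a Batch() extension method.
        var batches = keys.Select((k, i) => new { Key = k, Index = i })
                          .GroupBy(x => x.Index / 100, x => x.Key);

        foreach (var batch in batches)
            Console.WriteLine("batch {0}: {1} item(s)", batch.Key, batch.Count());
    }
}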
/// <summary>
/// Resolver context
/// </summary>
/// <param name="dependencyBehavior">behavior for non-target packages</param>
/// <param name="targetIds">packages to install or update</param>
/// <param name="requiredPackageIds">packages required in the solution</param>
/// <param name="packagesConfig">existing packages</param>
/// <param name="preferredVersions">preferred package versions or the installed version of a package</param>
/// <param name="availablePackages">all packages from the gather stage</param>
public PackageResolverContext(DependencyBehavior dependencyBehavior,
    IEnumerable<string> targetIds,
    IEnumerable<string> requiredPackageIds,
    IEnumerable<Packaging.PackageReference> packagesConfig,
    IEnumerable<PackageIdentity> preferredVersions,
    IEnumerable<SourcePackageDependencyInfo> availablePackages)
{
    if (targetIds == null)
    {
        throw new ArgumentNullException(nameof(targetIds));
    }

    if (requiredPackageIds == null)
    {
        throw new ArgumentNullException(nameof(requiredPackageIds));
    }

    if (packagesConfig == null)
    {
        throw new ArgumentNullException(nameof(packagesConfig));
    }

    if (preferredVersions == null)
    {
        throw new ArgumentNullException(nameof(preferredVersions));
    }

    if (availablePackages == null)
    {
        throw new ArgumentNullException(nameof(availablePackages));
    }

    DependencyBehavior = dependencyBehavior;
    TargetIds = new HashSet<string>(targetIds, StringComparer.OrdinalIgnoreCase);
    RequiredPackageIds = new HashSet<string>(requiredPackageIds, StringComparer.OrdinalIgnoreCase);
    RequiredPackageIds.UnionWith(targetIds);
    PackagesConfig = packagesConfig;
    PreferredVersions = new HashSet<PackageIdentity>(preferredVersions, PackageIdentity.Comparer);
    AvailablePackages = availablePackages;

    Debug.Assert(PreferredVersions.GroupBy(p => p.Id, StringComparer.OrdinalIgnoreCase)
        .All(group => group.Count() == 1), "duplicate preferred ids");
}
public void HiloCannotGoDown()
{
    using (var store = NewDocumentStore())
    {
        store.DatabaseCommands.Put(
            "Raven/Hilo/Users",
            null,
            new RavenJObject { { "Max", 32 } },
            new RavenJObject());

        var hiLoKeyGenerator = new HiLoKeyGenerator("Users", 32);

        var ids = new HashSet<long> { hiLoKeyGenerator.NextId(store.DatabaseCommands) };

        store.DatabaseCommands.Put(
            "Raven/Hilo/Users",
            null,
            new RavenJObject { { "Max", 12 } },
            new RavenJObject());

        for (int i = 0; i < 128; i++)
        {
            Assert.True(ids.Add(hiLoKeyGenerator.NextId(store.DatabaseCommands)), "Failed at " + i);
        }

        var list = ids.GroupBy(x => x).Select(g => new { g.Key, Count = g.Count() }).Where(x => x.Count > 1).ToList();
        Assert.Empty(list);
    }
}
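Worth noting: ids is a HashSet<long>, so the trailing GroupBy duplicate check can never produce a non-empty list; the effective duplicate guard is the asserted return value of ids.Add. A minimal illustration:

using System;
using System.Collections.Generic;
using System.Linq;

static class Program
{
    static void Main()
    {
        var ids = new HashSet<long> { 1, 2, 3 };

        Console.WriteLine(ids.Add(2)); // False: Add already reports the duplicate...

        // ...so grouping a HashSet by its own elements never yields Count > 1.
        var duplicates = ids.GroupBy(x => x).Where(g => g.Count() > 1).ToList();
        Console.WriteLine(duplicates.Count); // 0
    }
}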
private static void DoWatcherChanged(object state)
{
    try
    {
        if (IsCompiling)
        {
            // Still compiling: postpone handling until the delay elapses.
            _changeWatchingTimer.Change(_settupInfo.ScriptChangedDelay, Timeout.Infinite);
            return;
        }

        HashSet<string> tmp = new HashSet<string>();
        var changedFiles = Interlocked.Exchange(ref _changedFiles, tmp);
        _changeWatchingTimer.Change(Timeout.Infinite, Timeout.Infinite);

        HashSet<string> localChangedFiles = new HashSet<string>();
        foreach (var file in changedFiles)
        {
            // Skip files whose content (MD5 hash) is unchanged.
            if (_runtimeDomain.Scope.VerifyScriptHashCode(file))
            {
                continue;
            }
            localChangedFiles.Add(file);
        }

        TraceLog.WriteLine("Update script file count:{0}.", localChangedFiles.Count);
        if (localChangedFiles.Count == 0)
        {
            return;
        }

        bool hasModelFile = false;
        // Group the changed files by file extension.
        var changeGroup = localChangedFiles.GroupBy(t =>
        {
            if (!hasModelFile && _runtimeDomain.Scope.IsModelScript(t))
            {
                hasModelFile = true;
            }
            return Path.GetExtension(t);
        }).OrderBy(t => t.Key);

        bool isLoop = true;
        foreach (var group in changeGroup)
        {
            if (!isLoop)
            {
                break;
            }
            string ext = group.Key.ToLower();
            switch (ext)
            {
                case ".cs":
                    // Start compiling.
                    if (hasModelFile)
                    {
                        TraceLog.WriteLine("{1} {0} compile start...", "model script", DateTime.Now.ToString("HH:mm:ss"));
                        var scope = InitScriptRuntimeScope();
                        PrintCompiledMessage("model script", scope != null && scope.ModelAssembly != null ? scope.ModelAssembly.FullName : "null");
                        isLoop = false;
                    }
                    else
                    {
                        if (Interlocked.Exchange(ref _isCompiling, 1) == 0)
                        {
                            try
                            {
                                var str = group.Last();
                                TraceLog.WriteLine("{1} {0} compile start...", "csharp script", DateTime.Now.ToString("HH:mm:ss"));
                                _runtimeDomain.Scope.InitCsharp();
                                _runtimeDomain.MainInstance = (dynamic)_runtimeDomain.Scope.Execute(_settupInfo.ScriptMainProgram, typeName: _settupInfo.ScriptMainTypeName);
                                RunMainProgram();
                                // _runtimeDomain.MainInstance.Start(_runtimeDomain.MainArgs);
                                PrintCompiledMessage("csharp script");
                            }
                            finally
                            {
                                Interlocked.Exchange(ref _isCompiling, 0);
                            }
                        }
                        else
                        {
                            TraceLog.WriteLine("{1} {0} is already being compiled on another thread.", "csharp script", DateTime.Now.ToString("HH:mm:ss"));
                        }
                    }
                    break;

                case ".py":
                    _runtimeDomain.Scope.InitPython(group.ToArray());
                    PrintCompiledMessage("python script");
                    break;

                case ".lua":
                    _runtimeDomain.Scope.InitLua();
                    PrintCompiledMessage("lua script");
                    break;

                default:
                    throw new NotSupportedException(string.Format("Script type \"{0}\" not supported.", ext));
            }

            DoScriptLoaded(ext, group.ToArray());
        }
    }
    catch (Exception ex)
    {
        TraceLog.WriteError("DoWatcherChanged error:{0}", ex);
    }
}
static string GetTypescript(List<Assembly> assemblies)
{
    var types = assemblies.Distinct().SelectMany(a => a.GetTypes())
        .Where(x => filterType(x, AllowGeneric))
        .OrderBy(x => x.Namespace)
        .GroupBy(x => GetNameWithoutGenericArity(x.ToString()))
        // Keep the declaration with the most generic arguments for each name;
        // the inner lambda parameter is renamed so it does not shadow the outer one.
        .Select(g => g.OrderByDescending(t => t.GetGenericArguments().Length).First())
        .Append(null);

    var sb = new StringBuilder();
    var nsStack = new Stack<string>();
    var n = "\n";

    string spaces(int depth = 0)
    {
        if ((nsStack.Count + depth) == 0)
        {
            return "";
        }
        return new string(' ', (nsStack.Count + depth) * 2);
    }

    foreach (var type in types)
    {
        var lastNs = nsStack.Count > 0 ? nsStack.Peek() : "";
        var ns = type?.Namespace ?? "";
        while (lastNs != ns)
        {
            if (nsStack.Count == 0 || ns.Contains(lastNs + "."))
            {
                // Go deeper
                var nsName = string.IsNullOrWhiteSpace(lastNs) ? ns : ns.Replace(lastNs + ".", "");
                var splits = nsName.Split('.');
                var curName = lastNs;
                foreach (var split in splits)
                {
                    var topLevel = nsStack.Count == 0;
                    var declareOrNot = topLevel ? "declare " : "";
                    curName = string.IsNullOrWhiteSpace(curName) ? split : $"{curName}.{split}";
                    sb.Append($"{spaces()}export {declareOrNot}namespace {split} {{{n}");
                    nsStack.Push(curName);
                }
                lastNs = ns;
            }
            else
            {
                nsStack.Pop();
                lastNs = nsStack.Count > 0 ? nsStack.Peek() : "";
                sb.Append($"{spaces()}}}{n}");
            }
        }

        if (type == null)
        {
            break;
        }

        var bl = spaces();
        var bl1 = spaces(1);
        if (type.IsEnum)
        {
            sb.Append($"{bl}export enum {getTypesScriptType(type, false, true)} {{{n}");
            var fields = type.GetFields().Where(x => x.Name != "value__");
            foreach (var info in fields)
            {
                sb.Append($"{bl1}{info.Name} = {getTypeScriptValue(info.GetRawConstantValue())},{n}");
            }
        }
        else
        {
            sb.Append($"{bl}{(ExportAsClass && !type.IsInterface ? "export class" : "export interface")} {getTypesScriptType(type, false, true, AllowGeneric, " = any")} {{{n}");
            if (ExportAsClass)
            {
                var ctors = type.GetConstructors(BindingFlags.Instance | BindingFlags.Public)
                    .Where(x => !x.GetParameters().Any(p => p.ParameterType.IsByRef || p.ParameterType.IsPointer));
                foreach (var info in ctors)
                {
                    sb.Append($"{bl1}{getTypeScriptString(info)}{n}");
                }
            }

            var props = type.GetProperties(BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public)
                .Where(x => !x.IsSpecialName && x.GetIndexParameters().Length == 0 && !x.PropertyType.IsPointer);
            var fields = type.GetFields(BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public)
                .Where(x => !x.IsSpecialName && !x.FieldType.IsPointer);
            var methods = type.GetMethods(BindingFlags.Instance | BindingFlags.Static | BindingFlags.Public)
                .Where(x => !x.IsSpecialName && !(x.ReturnType.IsByRef || x.ReturnType.IsPointer) &&
                    !x.GetParameters().Any(p => p.ParameterType.IsByRef || p.ParameterType.IsPointer) && !x.IsGenericMethod);
            var methodsGrouped = methods.GroupBy(x => x.Name);

            foreach (var info in props)
            {
                sb.Append($"{bl1}{getTypeScriptString(info)}{n}");
            }
            foreach (var info in fields)
            {
                sb.Append($"{bl1}{getTypeScriptString(info)}{n}");
            }
            //foreach (var info in methodsGrouped)
            //    sb.Append($"{bl1}{getTypeScriptString(info)}{n}");
            foreach (var info in methods)
            {
                sb.Append($"{bl1}{getTypeScriptString(info)}{n}");
            }
        }
        sb.Append($"{bl}}}{n}");
    }

    var importGroups = Imports.GroupBy(x => ImportNamespaces[x]);
    return $"//{n}" +
        $"// Types in assemblies: {string.Join(", ", assemblies.Select(x => x.GetName().Name))}{n}" +
        $"// Generated {DateTime.Now}{n}" +
        $"//{n}" +
        $"{string.Join(n, importGroups.Select(x => $"import {{ {string.Join(",", x)} }} from './{x.Key}';"))}{n}" +
        n + sb;
}
/// <summary>
/// Executes orders for a turn.
/// </summary>
public bool ExecuteOrders()
{
    bool didStuff = false;
    if (AreOrdersOnHold)
    {
        return didStuff;
    }

    UnspentRate = Rate;
    var empty = new ResourceQuantity();
    var builtThisTurn = new HashSet<IConstructable>();
    bool done = false;
    while (!done && Orders.Any() && (Owner.StoredResources > empty || UpcomingSpending.IsEmpty))
    {
        var numOrders = Orders.Count;
        var spentThisRound = new ResourceQuantity();
        foreach (var order in Orders.Cast<IConstructionOrder>().ToArray())
        {
            if (order == null)
            {
                // WTF
                Orders.Remove(order);
                continue;
            }

            var reasonForNotBuilding = GetReasonForBeingUnableToConstruct(order.Template);
            if (reasonForNotBuilding != null)
            {
                // can't build that here!
                Orders.RemoveAt(0);
                Owner.Log.Add(Container.CreateLogMessage(order.Template + " cannot be built at " + this + " because " + reasonForNotBuilding, LogMessages.LogMessageType.Error));
            }
            else
            {
                var oldProgress = new ResourceQuantity(order.Item?.ConstructionProgress);
                order.Execute(this);
                var newProgress = new ResourceQuantity(order.Item?.ConstructionProgress);
                if (newProgress < (order.Item?.Cost ?? new ResourceQuantity()) && newProgress == oldProgress && order == Orders.Last())
                {
                    done = true; // made no progress and nothing else to try and build
                }

                if (order.CheckCompletion(this))
                {
                    // upgrade facility orders place their own facilities
                    if (!(order is UpgradeFacilityOrder))
                    {
                        order.Item.Place(Container);
                    }
                    Orders.Remove(order);
                    if (AreRepeatOrdersEnabled)
                    {
                        var copy = order.Copy<IConstructionOrder>();
                        copy.Reset();
                        Orders.Add(copy);
                    }
                    builtThisTurn.Add(order.Item);
                    if (order.Item is Ship || order.Item is Base)
                    {
                        // trigger ship built happiness changes
                        Owner.TriggerHappinessChange(hm => hm.AnyShipConstructed);
                        if (Container is Planet p)
                        {
                            p.Colony.TriggerHappinessChange(hm => hm.ShipConstructed);
                        }
                    }
                    if (order.Item is Facility)
                    {
                        // trigger facility built happiness changes
                        if (Container is Planet p)
                        {
                            p.Colony.TriggerHappinessChange(hm => hm.FacilityConstructed);
                        }
                    }
                }
            }
        }

        didStuff = true;

        if (!AreRepeatOrdersEnabled)
        {
            done = true;
        }
    }

    foreach (var g in builtThisTurn.GroupBy(i => i.Template))
    {
        if (g.Count() == 1)
        {
            Owner.Log.Add(g.First().CreateLogMessage(g.First() + " has been constructed at " + Name + ".", logMessageType: LogMessages.LogMessageType.ConstructionComplete));
        }
        else
        {
            Owner.Log.Add(g.First().CreateLogMessage(g.Count() + "x " + g.Key + " have been constructed at " + Name + ".", logMessageType: LogMessages.LogMessageType.ConstructionComplete));
        }
    }

    return didStuff;
}
// The method which performs rename, resolves the conflict locations and returns the result of the rename operation
public async Task<ConflictResolution> ResolveConflictsAsync()
{
    try
    {
        await FindDocumentsAndPossibleNameConflicts().ConfigureAwait(false);
        var baseSolution = _renameLocationSet.Solution;

        // Process rename one project at a time to improve caching and reduce syntax tree serialization.
        var documentsGroupedByTopologicallySortedProjectId = _documentsIdsToBeCheckedForConflict
            .GroupBy(d => d.ProjectId)
            .OrderBy(g => _topologicallySortedProjects.IndexOf(g.Key));

        _replacementTextValid = IsIdentifierValid_Worker(baseSolution, _replacementText, documentsGroupedByTopologicallySortedProjectId.Select(g => g.Key), _cancellationToken);
        var renamedSpansTracker = new RenamedSpansTracker();
        var conflictResolution = new ConflictResolution(baseSolution, renamedSpansTracker, _replacementText, _replacementTextValid);

        foreach (var documentsByProject in documentsGroupedByTopologicallySortedProjectId)
        {
            var documentIdsThatGetsAnnotatedAndRenamed = new HashSet<DocumentId>(documentsByProject);
            using (baseSolution.Services.CacheService?.EnableCaching(documentsByProject.Key))
            {
                // Rename is going to be in 4 phases.
                // 1st phase - Does a simple token replacement.
                // If the 1st phase results in conflicts then we perform:
                // 2nd phase - Expand and simplify only the reference locations with conflicts.
                // 3rd phase - Expand and simplify all the conflict locations (both reference and non-reference).
                // If there are unresolved conflicts after the 3rd phase then in the 4th phase
                // we complexify and resolve locations that were resolvable, and for the other locations we perform the normal token replacement like in the first phase.
                for (int phase = 0; phase < 4; phase++)
                {
                    // Step 1:
                    // The rename process and annotation for the bookkeeping is performed in one step.
                    // The process, in short, is:
                    // 1. If renaming a token which has no conflict, replace the token and map the old span to the new span.
                    // 2. If we encounter a node that has to be expanded (because there was a conflict in the previous phase), we expand it.
                    //    If the node happens to contain a token that needs to be renamed, we annotate it and rename it after expansion; otherwise we just expand and proceed.
                    // 3. Throughout the whole process we maintain a map of the old span to the new span. In case of expansion & rename, we map the expanded node and the renamed token.
                    conflictResolution.UpdateCurrentSolution(await AnnotateAndRename_WorkerAsync(
                        baseSolution,
                        conflictResolution.NewSolution,
                        documentIdsThatGetsAnnotatedAndRenamed,
                        _renameLocationSet.Locations,
                        renamedSpansTracker,
                        _replacementTextValid).ConfigureAwait(false));

                    // Step 2: Check for conflicts in the renamed solution
                    bool foundResolvableConflicts = await IdentifyConflictsAsync(
                        documentIdsForConflictResolution: documentIdsThatGetsAnnotatedAndRenamed,
                        allDocumentIdsInProject: documentsByProject,
                        projectId: documentsByProject.Key,
                        conflictResolution: conflictResolution).ConfigureAwait(false);

                    if (!foundResolvableConflicts || phase == 3)
                    {
                        break;
                    }

                    if (phase == 0)
                    {
                        _conflictLocations = conflictResolution.RelatedLocations
                            .Where(loc => (documentIdsThatGetsAnnotatedAndRenamed.Contains(loc.DocumentId) && loc.Type == RelatedLocationType.PossiblyResolvableConflict && loc.IsReference))
                            .Select(loc => new ConflictLocationInfo(loc))
                            .ToSet();

                        // If there were no conflicting locations in references, then the first conflict phase has to be skipped.
                        if (_conflictLocations.Count == 0)
                        {
                            phase++;
                        }
                    }

                    if (phase == 1)
                    {
                        _conflictLocations = _conflictLocations.Concat(conflictResolution.RelatedLocations
                            .Where(loc => documentIdsThatGetsAnnotatedAndRenamed.Contains(loc.DocumentId) && loc.Type == RelatedLocationType.PossiblyResolvableConflict)
                            .Select(loc => new ConflictLocationInfo(loc)))
                            .ToSet();
                    }

                    // Set the documents with conflicts that need to be processed in the next phase.
                    // Note that we need to get the conflictLocations here since we're going to remove some locations below if phase == 2.
                    documentIdsThatGetsAnnotatedAndRenamed = new HashSet<DocumentId>(_conflictLocations.Select(l => l.DocumentId));

                    if (phase == 2)
                    {
                        // After phase 2, if there are still conflicts then remove the conflict locations from being expanded.
                        var unresolvedLocations = conflictResolution.RelatedLocations
                            .Where(l => (l.Type & RelatedLocationType.UnresolvedConflict) != 0)
                            .Select(l => Tuple.Create(l.ComplexifiedTargetSpan, l.DocumentId)).Distinct();

                        _conflictLocations = _conflictLocations.Where(l => !unresolvedLocations.Any(c => c.Item2 == l.DocumentId && c.Item1.Contains(l.OriginalIdentifierSpan))).ToSet();
                    }

                    // Clean up side effects from rename before entering the next phase.
                    conflictResolution.ClearDocuments(documentIdsThatGetsAnnotatedAndRenamed);
                }

                // Step 3: Simplify the project
                conflictResolution.UpdateCurrentSolution(await renamedSpansTracker.SimplifyAsync(conflictResolution.NewSolution, documentsByProject, _replacementTextValid, _renameAnnotations, _cancellationToken).ConfigureAwait(false));
                await conflictResolution.RemoveAllRenameAnnotationsAsync(documentsByProject, _renameAnnotations, _cancellationToken).ConfigureAwait(false);
            }
        }

        // This rename could break implicit references of this symbol (e.g. rename MoveNext on a collection-like type used in a foreach statement).
        ISymbol renamedSymbolInNewSolution = await GetRenamedSymbolInCurrentSolutionAsync(conflictResolution).ConfigureAwait(false);
        if (IsRenameValid(conflictResolution, renamedSymbolInNewSolution))
        {
            AddImplicitConflicts(
                renamedSymbolInNewSolution,
                _renameLocationSet.Symbol,
                _renameLocationSet.ImplicitLocations,
                await conflictResolution.NewSolution.GetDocument(_documentIdOfRenameSymbolDeclaration).GetSemanticModelAsync(_cancellationToken).ConfigureAwait(false),
                _renameSymbolDeclarationLocation,
                renamedSpansTracker.GetAdjustedPosition(_renameSymbolDeclarationLocation.SourceSpan.Start, _documentIdOfRenameSymbolDeclaration),
                conflictResolution,
                _cancellationToken);
        }

        foreach (var relatedLocation in conflictResolution.RelatedLocations)
        {
            if (relatedLocation.Type == RelatedLocationType.PossiblyResolvableConflict)
            {
                relatedLocation.Type = RelatedLocationType.UnresolvedConflict;
            }
        }

#if DEBUG
        await DebugVerifyNoErrorsAsync(conflictResolution, _documentsIdsToBeCheckedForConflict).ConfigureAwait(false);
#endif
        return conflictResolution;
    }
    catch (Exception e) when (FatalError.ReportUnlessCanceled(e))
    {
        throw ExceptionUtilities.Unreachable;
    }
}
public IEnumerable<IGrouping<string, Literal>> GetAllLiterals()
{
    return _clause.GroupBy(l => l.Name);
}
/// <summary>
/// Generates a syntax tree for the provided assemblies.
/// </summary>
/// <param name="assemblies">The assemblies to generate code for.</param>
/// <param name="runtime">Whether or not runtime code generation is being performed.</param>
/// <returns>The generated syntax tree.</returns>
private static GeneratedSyntax GenerateForAssemblies(List<Assembly> assemblies, bool runtime)
{
    if (Logger.IsVerbose)
    {
        Logger.Verbose(
            "Generating code for assemblies: {0}",
            string.Join(", ", assemblies.Select(_ => _.FullName)));
    }

    Assembly targetAssembly;
    HashSet<Type> ignoredTypes;
    if (runtime)
    {
        // Ignore types which have already been accounted for.
        ignoredTypes = CodeGeneratorCommon.GetTypesWithImplementations(
            typeof(MethodInvokerAttribute),
            typeof(GrainReferenceAttribute),
            typeof(GrainStateAttribute),
            typeof(SerializerAttribute));
        targetAssembly = null;
    }
    else
    {
        ignoredTypes = new HashSet<Type>();
        targetAssembly = assemblies.FirstOrDefault();
    }

    var members = new List<MemberDeclarationSyntax>();

    // Get types from assemblies which reference Orleans and are not generated assemblies.
    var includedTypes = new HashSet<Type>();
    foreach (var type in assemblies.SelectMany(_ => _.DefinedTypes))
    {
        // The module containing the serializer.
        var module = runtime ? null : type.Module;
        var typeInfo = type.GetTypeInfo();

        // Every type which is encountered must be considered for serialization.
        if (!typeInfo.IsNested && !typeInfo.IsGenericParameter && typeInfo.IsSerializable)
        {
            // If a type was encountered which can be accessed, process it for serialization.
            var isAccessibleForSerialization = !TypeUtilities.IsTypeIsInaccessibleForSerialization(type, module, targetAssembly);
            if (isAccessibleForSerialization)
            {
                includedTypes.Add(type);
                SerializerGenerationManager.RecordTypeToGenerate(type);
            }
        }

        // Collect the types which require code generation.
        if (GrainInterfaceData.IsGrainInterface(type))
        {
            if (Logger.IsVerbose2)
            {
                Logger.Verbose2("Will generate code for: {0}", type.GetParseableName());
            }
            includedTypes.Add(type);
        }
    }

    includedTypes.RemoveWhere(_ => ignoredTypes.Contains(_));

    // Group the types by namespace and generate the required code in each namespace.
    foreach (var group in includedTypes.GroupBy(_ => CodeGeneratorCommon.GetGeneratedNamespace(_)))
    {
        var namespaceMembers = new List<MemberDeclarationSyntax>();
        foreach (var type in group)
        {
            // The module containing the serializer.
            var module = runtime ? null : type.Module;

            // Every type which is encountered must be considered for serialization.
            Action<Type> onEncounteredType = encounteredType =>
            {
                // If a type was encountered which can be accessed, process it for serialization.
                var isAccessibleForSerialization = !TypeUtilities.IsTypeIsInaccessibleForSerialization(encounteredType, module, targetAssembly);
                if (isAccessibleForSerialization)
                {
                    SerializerGenerationManager.RecordTypeToGenerate(encounteredType);
                }
            };

            if (Logger.IsVerbose2)
            {
                Logger.Verbose2("Generating code for: {0}", type.GetParseableName());
            }

            if (GrainInterfaceData.IsGrainInterface(type))
            {
                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2(
                        "Generating GrainReference and MethodInvoker for {0}",
                        type.GetParseableName());
                }

                GrainInterfaceData.ValidateInterfaceRules(type);
                namespaceMembers.Add(GrainReferenceGenerator.GenerateClass(type, onEncounteredType));
                namespaceMembers.Add(GrainMethodInvokerGenerator.GenerateClass(type));
            }

            // Generate serializers.
            var first = true;
            Type toGen;
            while (SerializerGenerationManager.GetNextTypeToProcess(out toGen))
            {
                // Filter out types which are inaccessible to the serialization module/assembly.
                var skipSerializerGeneration = toGen.GetAllFields()
                    .Any(
                        field =>
                        TypeUtilities.IsTypeIsInaccessibleForSerialization(
                            field.FieldType,
                            module,
                            targetAssembly));
                if (skipSerializerGeneration)
                {
                    continue;
                }

                if (!runtime)
                {
                    if (first)
                    {
                        ConsoleText.WriteStatus("ClientGenerator - Generating serializer classes for types:");
                        first = false;
                    }

                    ConsoleText.WriteStatus(
                        "\ttype " + toGen.FullName + " in namespace " + toGen.Namespace + " defined in Assembly " + toGen.Assembly.GetName());
                }

                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2(
                        "Generating & Registering Serializer for Type {0}",
                        toGen.GetParseableName());
                }

                namespaceMembers.AddRange(SerializerGenerator.GenerateClass(toGen, onEncounteredType));
            }
        }

        if (namespaceMembers.Count == 0)
        {
            if (Logger.IsVerbose)
            {
                Logger.Verbose2("Skipping namespace: {0}", group.Key);
            }
            continue;
        }

        members.Add(
            SF.NamespaceDeclaration(SF.ParseName(group.Key))
                .AddUsings(
                    TypeUtils.GetNamespaces(typeof(TaskUtility), typeof(GrainExtensions))
                        .Select(_ => SF.UsingDirective(SF.ParseName(_)))
                        .ToArray())
                .AddMembers(namespaceMembers.ToArray()));
    }

    return new GeneratedSyntax
    {
        SourceAssemblies = assemblies,
        Syntax = members.Count > 0 ? SF.CompilationUnit().AddMembers(members.ToArray()) : null
    };
}
public IEnumerable<DataObject> GetUntypedEditableReader(ITransaction transaction, IEnumerable<string> readOnlyFields = null)
{
    SchemaObject schemaObject = Schema.Schema.GetSchemaObject(DataObjectType);

    HashSet<string> fields = new HashSet<string>();
    foreach (Schema.Field field in schemaObject.GetFields())
    {
        fields.Add(field.FieldName);
    }
    fields.AddRange(readOnlyFields);

    ISelectQuery selectQuery = GetBaseQuery(schemaObject, fields, out Dictionary<string, string> tableAliasesByFieldPath);
    DataTable table = selectQuery.Execute(transaction);

    FieldInfo isEditableField = DataObjectType.GetField("isEditable", BindingFlags.NonPublic | BindingFlags.Instance);
    foreach (DataRow row in table.Rows)
    {
        DataObject dataObject = (DataObject)Activator.CreateInstance(DataObjectType);
        isEditableField.SetValue(dataObject, true);

        // Group the field names by their relationship path (everything before the last dot).
        foreach (IGrouping<string, string> fieldByPath in fields.GroupBy(field =>
        {
            if (field.Contains("."))
            {
                return field.Substring(0, field.LastIndexOf('.'));
            }
            return string.Empty;
        }))
        {
            DataObject objectToSetValueOn = dataObject;
            if (fieldByPath.Key.Contains("."))
            {
                string[] parts = fieldByPath.Key.Split('.');
                SchemaObject lastSchemaObject = schemaObject;
                for (int i = 0; i < parts.Length - 1; i++)
                {
                    Relationship relationship = lastSchemaObject.GetRelationship(parts[i]);
                    DataObject relatedDataObject = relationship.GetValue(objectToSetValueOn);
                    if (relatedDataObject == null)
                    {
                        relatedDataObject = (DataObject)Activator.CreateInstance(relationship.RelatedObjectType);
                        relationship.SetPrivateDataCallback(objectToSetValueOn, relatedDataObject);
                    }
                    objectToSetValueOn = relatedDataObject;
                    lastSchemaObject = relationship.RelatedSchemaObject;
                }
            }

            string fieldAlias = tableAliasesByFieldPath[fieldByPath.Key];
            foreach (string field in fieldByPath)
            {
                string fieldName = field;
                if (fieldName.Contains('.'))
                {
                    fieldName = fieldName.Substring(fieldName.LastIndexOf('.') + 1);
                }

                string columnName = $"{fieldAlias}_{fieldName}";
                object databaseValue = row[columnName];
                Schema.Field schemaField = schemaObject.GetField(field);
                schemaField.SetPrivateDataCallback(objectToSetValueOn, databaseValue);
            }
        }

        yield return dataObject;
    }
}
/// <summary>
/// Check for conflicts of versions in the referenced assemblies of the workflow.
/// </summary>
/// <param name="referencedTypes">Referenced types in the project</param>
/// <param name="referencedAssemblies">Referenced assemblies in the project</param>
/// <param name="projectName">Name of the main type (workflow) of the project</param>
private static void CheckForConflictingVersions(HashSet<Type> referencedTypes, HashSet<Assembly> referencedAssemblies, string projectName)
{
    // XamlBuildTask cannot support two different versions of the same dependency in XAML.
    // As a workaround, we raise an error here if the workflow contains activities/variables/etc.
    // from different versions of the same assembly.
    var conflicts = referencedAssemblies.GroupBy(asm => asm.GetName().Name).Where(grp => grp.Count() > 1).ToList();
    if (conflicts.Any())
    {
        var conflict = conflicts.First();
        Assembly asm1 = referencedAssemblies.First(item => item.GetName().Name == conflict.Key);
        Assembly asm2 = referencedAssemblies.Last(item => item.GetName().Name == conflict.Key);
        var type1 = referencedTypes.First(item => item.Assembly.GetName().Name == asm1.GetName().Name && item.Assembly.GetName().Version == asm1.GetName().Version);
        var type2 = referencedTypes.First(item => item.Assembly.GetName().Name == asm2.GetName().Name && item.Assembly.GetName().Version == asm2.GetName().Version);
        string message = string.Format(CompileMessages.MultipleVersionsUsed, type1.Name, asm1.FullName, type2.Name, asm2.FullName, conflict.Key);
        throw new CompileException(message);
    }

    // Check if the workflow contains a previous version of itself.
    var referencesToItself = new List<Assembly>(
        from assembly in referencedAssemblies
        where assembly.GetName().Name == projectName
        select assembly);
    if (referencesToItself.Any())
    {
        string message = string.Format(CompileMessages.PreviousSelfVersion, referencesToItself.First().GetName().Name, referencesToItself.First().GetName().FullName);
        throw new CompileException(message);
    }
}
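The version-conflict check reduces to "group by a key and flag keys occurring more than once". A self-contained sketch of the idiom, with hypothetical (name, version) pairs standing in for assembly identities:

using System;
using System.Collections.Generic;
using System.Linq;

static class Program
{
    static void Main()
    {
        // Hypothetical (name, version) pairs standing in for assembly identities.
        var references = new HashSet<(string Name, int Version)>
        {
            ("Lib.A", 1), ("Lib.A", 2), ("Lib.B", 1)
        };

        // Same idiom as above: group by name, keep groups with more than one member.
        var conflicts = references.GroupBy(r => r.Name).Where(g => g.Count() > 1);
        foreach (var conflict in conflicts)
            Console.WriteLine("conflicting versions of {0}: {1}",
                conflict.Key, string.Join(", ", conflict.Select(r => r.Version)));
    }
}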
protected void MakeOrder(object sender, EventArgs e)
{
    HashSet<OrderDetailsView> orderList = (HashSet<OrderDetailsView>)Session["tender"];
    IEnumerable<IGrouping<string, OrderDetailsView>> groups = orderList.GroupBy(k => k.SupplierId);

    // Create a new order per supplier.
    foreach (var o in groups)
    {
        Purchase_Order po = new Purchase_Order();
        po.supplier_id = o.Key;
        po.clerk_user = User.Identity.Name;
        po.delivery_by = DateTime.Parse(deliverydate.Text);
        po.order_date = DateTime.Today;
        po.deliver_address = "1 University Road, #01-00 Store Warehouse, Singapore 786541";
        po.deleted = "N";
        s.Purchase_Order.Add(po);
        s.SaveChanges();

        // Get OrderID
        Purchase_Order createdorder = s.Purchase_Order.Where(x => x.clerk_user == User.Identity.Name).OrderBy(x => x.order_id).ToList().Last();

        foreach (var item in o)
        {
            Purchase_Order_Details orderDetail = new Purchase_Order_Details();
            orderDetail.order_id = createdorder.order_id;
            orderDetail.tender_id = item.TenderId;
            orderDetail.quantity = item.Quantity;
            orderDetail.status = "Pending";
            orderDetail.cancelled = "N";
            orderDetail.deleted = "N";
            s.Purchase_Order_Details.Add(orderDetail);
            s.SaveChanges();
        }
    }

    UserModel currentUserModel = new UserModel(User.Identity.Name);

    /* Email logic */
    string fromEmail = currentUserModel.Email;
    string fromName = currentUserModel.Fullname;
    UserModel deptHead = currentUserModel.FindDelegateOrDeptHead();
    string toEmail = deptHead.Email;
    string toName = deptHead.Fullname;
    string subject = string.Format("Your purchase order confirmation, {0}", fromName);

    StringBuilder sb = new StringBuilder();
    sb.AppendLine("Dear " + fromName + ",");
    sb.AppendLine("<br />");
    sb.AppendLine("<br />");
    sb.AppendLine("You have sent a purchase order.");
    sb.AppendLine("<br />");
    sb.AppendLine("<br />");
    sb.AppendLine(string.Format("Please <a href=\"{0}\">follow this link to view the request</a>.", "https://rebrand.ly/ssis-store-receiveorder"));
    sb.AppendLine("<br />");
    sb.AppendLine("<br />");
    sb.AppendLine("Thank you.");
    sb.AppendLine("<br />");
    sb.AppendLine("<br />");
    sb.AppendLine("<i>This message was auto-generated by the Stationery Store Inventory System.</i>");
    string body = sb.ToString();

    new Emailer(fromEmail, fromName).SendEmail(fromEmail, fromName, subject, body);
    /* End of email logic */

    Session["tender"] = null;
    Session["item"] = null;
    Response.Redirect("~/Views/StoreClerk/PurchaseOrderSuccess.aspx");
}
/// <summary>
/// Do the one time scan of all the assemblies
/// </summary>
private void ScanForAssemblies()
{
    var assemblies = new HashSet<AssemblyLocationInformation>();
    if (_applicationConfig.ScanForEmbeddedAssemblies)
    {
        foreach (var loadedAssembly in LoadedAssemblies.Where(pair => !AssembliesToIgnore.IsMatch(pair.Key)).Select(pair => pair.Value).ToList())
        {
            string[] resources;
            try
            {
                resources = Resources.GetCachedManifestResourceNames(loadedAssembly);
            }
            catch (Exception ex)
            {
                Log.Warn().WriteLine(ex, "Couldn't retrieve resources from {0}", loadedAssembly.GetName().Name);
                continue;
            }

            foreach (var resource in resources)
            {
                var resourceMatch = _assemblyResourceNameRegex.Match(resource);
                if (resourceMatch.Success)
                {
                    assemblies.Add(new AssemblyLocationInformation(resourceMatch.Groups["assembly"].Value, loadedAssembly, resource));
                }
            }
        }
    }

    if (_applicationConfig.UseStrictChecking)
    {
        foreach (var applicationConfigScanDirectory in _applicationConfig.ScanDirectories)
        {
            if (!Directory.Exists(applicationConfigScanDirectory))
            {
                throw new DirectoryNotFoundException(applicationConfigScanDirectory);
            }
        }
    }

    foreach (var fileLocation in FileLocations.Scan(_applicationConfig.ScanDirectories, _assemblyFilenameRegex, SearchOption.TopDirectoryOnly))
    {
        assemblies.Add(new AssemblyLocationInformation(fileLocation.Item2.Groups["assembly"].Value, fileLocation.Item1));
    }

    // Reduce step 1) Of the duplicated assemblies, take only those which are embedded or on the file system in the probing path.
    foreach (var assemblyGroup in assemblies.GroupBy(information => information.Name).ToList())
    {
        var groupList = assemblyGroup.ToList();
        if (groupList.Count <= 1)
        {
            continue;
        }

        // Remove filesystem assemblies from the list which are not in the AssemblyResolveDirectories.
        var unneededAssemblies = groupList.Where(info => !info.IsEmbedded && !info.IsOnProbingPath).ToList();
        if (groupList.Count - unneededAssemblies.Count < 1)
        {
            continue;
        }

        foreach (var unneededAssemblyInformation in unneededAssemblies)
        {
            assemblies.Remove(unneededAssemblyInformation);
        }
    }

    // Reduce step 2) Remove assemblies which are older, keeping the newest file per assembly name.
    foreach (var assemblyGroup in assemblies.GroupBy(information => information.Name).ToList())
    {
        var groupList = assemblyGroup.ToList();
        if (groupList.Count <= 1)
        {
            continue;
        }

        foreach (var unneededAssemblyInformation in groupList.OrderByDescending(info => info.FileDate).Skip(1).ToList())
        {
            assemblies.Remove(unneededAssemblyInformation);
        }
    }

    // Create the assembly locations.
    foreach (var assemblyLocationInformation in assemblies)
    {
        AvailableAssemblies[assemblyLocationInformation.Name] = assemblyLocationInformation;
    }
}
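Both reduce steps follow one idiom: group by name, order within each group, keep a single element. A compact standalone sketch, assuming file dates as the tiebreaker (names and dates here are made up):

using System;
using System.Collections.Generic;
using System.Linq;

static class Program
{
    static void Main()
    {
        // Hypothetical assembly names and file dates.
        var files = new HashSet<(string Name, DateTime Date)>
        {
            ("A.dll", new DateTime(2020, 1, 1)),
            ("A.dll", new DateTime(2021, 1, 1)),
            ("B.dll", new DateTime(2019, 6, 1)),
        };

        // Keep only the newest entry per name, as in reduce step 2 above.
        var newest = files.GroupBy(f => f.Name)
                          .Select(g => g.OrderByDescending(f => f.Date).First());

        foreach (var f in newest)
            Console.WriteLine("{0} ({1:yyyy-MM-dd})", f.Name, f.Date);
    }
}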
static int Main(string[] args) { #region Command Line argument processing if ( args.Contains ("--help") ) { Console.WriteLine ("This tool depends on Microsoft Office 2010+"); Console.WriteLine ("Valid switches are"); Console.WriteLine ("-ignore <comma separated list of file extn> : Ignore line with pattern"); Console.WriteLine ("-include <comma separated list of file extn> : Filter for pattern"); Console.WriteLine ("-concurrency <No of minutes> : Concurrency Window in minutes"); Console.WriteLine ("-toppages <No of pages> : No of Top Pages/day"); Console.WriteLine ("-peaks <No of peaks> : No of Peak Hours to consider"); Console.WriteLine ("-param <comma seperated list of patterns> : Summarize specific URL parameters"); Console.WriteLine ("-export <export filename> : Excel file report name, default will be with time stamp"); Console.WriteLine ("-folder <log file folder path> : Current folder will be defaulted. All .log files in this folder will be processed."); Console.WriteLine ("Add a space after the pattern if you want extension mapping (e.g. .aspx ,.jpg)"); return 0; } if ( args.Length % 2 != 0 ) { throw new ArgumentException ("Command line arguments not valid, try --help to see valid ones!"); } Dictionary<string, string> cmdArgs = new Dictionary<string, string> (); for ( int i = 0; i < args.Length; i += 2 ) { cmdArgs.Add (args[i].ToLower (), args[i + 1]); } List<string> ignoredTypes = new List<string> (), filterTypes = new List<string> (), hitsPerURLParams = new List<string> (); if ( cmdArgs.ContainsKey (IgnoreSwitch) ) { ignoredTypes = cmdArgs[IgnoreSwitch].ToLower ().Split (',').ToList (); } if ( cmdArgs.ContainsKey (FilterSwitch) ) { filterTypes = cmdArgs[FilterSwitch].ToLower ().Split (',').ToList (); } if ( cmdArgs.ContainsKey (URLParamsSwitch) ) { hitsPerURLParams = cmdArgs[URLParamsSwitch].ToLower ().Split (',').ToList (); } float concurrencyWindow = 5; if ( cmdArgs.ContainsKey (ConcurrencySwitch) ) { concurrencyWindow = float.Parse (cmdArgs[ConcurrencySwitch]); } else cmdArgs.Add (ConcurrencySwitch, concurrencyWindow.ToString ()); int topPagesPerDay = 10; if ( cmdArgs.ContainsKey (TopPagesSwitch) ) { topPagesPerDay = int.Parse (cmdArgs[TopPagesSwitch]); } else cmdArgs.Add (TopPagesSwitch, topPagesPerDay.ToString ()); int peakHoursCount = 3; if ( cmdArgs.ContainsKey (PeakHoursSwitch) ) { peakHoursCount = int.Parse (cmdArgs[PeakHoursSwitch]); } else cmdArgs.Add (PeakHoursSwitch, peakHoursCount.ToString ()); string exportFileName = null; if ( cmdArgs.ContainsKey (ExportFileSwitch) ) { try { exportFileName = Path.GetFullPath (cmdArgs[ExportFileSwitch]); } catch ( Exception e ) { Console.WriteLine ("Error creating report file:{0},{1}", e.GetType ().Name, e.Message); } } if ( exportFileName == null ) { exportFileName = Path.GetFullPath ("Processing results_" + DateTime.Now.ToString ("dd_hh_mm") + ".xlsx"); Console.WriteLine ("Writing output to {0}", exportFileName); } string curerntPath; if ( cmdArgs.ContainsKey (FolderSwitch) ) { try { curerntPath = Path.GetFullPath (cmdArgs[FolderSwitch]); } catch ( Exception e ) { Console.WriteLine ("Error accessing folder {0}:{1},{2}", cmdArgs[FolderSwitch], e.GetType ().Name, e.Message); return 1; } } else { curerntPath = Directory.GetCurrentDirectory (); Console.WriteLine ("Working on IIS logs from current folder {0}", curerntPath); } #endregion Stopwatch stopWatch = new Stopwatch (); stopWatch.Start (); //var files = Directory.GetFiles(curerntPath, "*.log").ToList(); var files = new DirectoryInfo (curerntPath) .GetFiles ("*.log") .OrderBy (f 
=> f.LastWriteTime) .Select (f => f.FullName) .ToArray (); var totalFile = files.Count (); if ( totalFile == 0 ) { Console.WriteLine ("No log files found!!"); return 0; } Console.WriteLine ("Found {0} log files", totalFile); var tmpFile = System.IO.Path.GetTempFileName (); int fileCount = 0; int headerRows = 4; int entryCount = 0; List<IISLogEntry> processingList = new List<IISLogEntry> (); DateTime nextTime = DateTime.MinValue; long TotalHits = 0, ServedRequests = 0; List<ConcurrentRequest> requests = new List<ConcurrentRequest> (); HashSet<string> uniqueIPs = new HashSet<string> (); Dictionary<int, int> httpStatus = new Dictionary<int, int> (); Dictionary<string, MethodInfo> pageViewsForPeriod = new Dictionary<string, MethodInfo> (); int totalDays = 0, totalHours = 0; Dictionary<string, MethodInfo> pageViewsDaily = new Dictionary<string, MethodInfo> (); HashSet<MethodInfo> dailyPages = new HashSet<MethodInfo> (); Dictionary<string, MethodInfo> pageViewsHourly = new Dictionary<string, MethodInfo> (); HashSet<MethodInfo> hourlyPages = new HashSet<MethodInfo> (); //hits for key URL parameters Dictionary<string, MethodInfo> urlParamHits = new Dictionary<string, MethodInfo> (); DateTime firstEntry = DateTime.MinValue, lastEntry = DateTime.MinValue; //placeholder HashSet<MethodInfo> filteredEntries = new HashSet<MethodInfo> (); int startRow = 1, startCol = 1; int reportRow = 2, reportCol = 1; Console.WriteLine ("Preparing to Process.."); foreach ( var f in files ) { try { ++fileCount; var progress = fileCount * 100 / totalFile; IEnumerable<string> matchedEntries = null; var contents = File.ReadLines (f); Dictionary<string, int> fieldIndex = new Dictionary<string, int> (); #region Content filter if ( filterTypes.Any () && ignoredTypes.Any () ) matchedEntries = contents.Where (s => s.StartsWith ("#") || ( filterTypes.Any (x => s.ToLower ().Contains (x)) && !ignoredTypes.Any (x => s.ToLower ().Contains (x)) )); else if ( filterTypes.Any () ) matchedEntries = contents.Where (s => s.StartsWith ("#") || filterTypes.Any (x => s.ToLower ().Contains (x))); else if ( ignoredTypes.Any () ) matchedEntries = contents.Where (s => s.StartsWith ("#") || !ignoredTypes.Any (x => s.ToLower ().Contains (x))); else matchedEntries = contents; foreach ( var rawLogEntry in matchedEntries ) { IISLogEntry logEntry; if ( rawLogEntry.StartsWith ("#") ) { if ( rawLogEntry.StartsWith ("#Fields:") ) fieldIndex = ParseHeaderFields (rawLogEntry); } else { Console.Write ("\r{0} File {1} of {2} files ({3}%), processing {4} ", stopWatch.Elapsed.ToString (@"hh\:mm\:ss"), fileCount, totalFile, progress, ++TotalHits); var columns = rawLogEntry.Split (' '); logEntry = new IISLogEntry () { TimeStamp = DateTime.Parse (columns[0] + " " + columns[1]), ClientIPAddress = fieldIndex.ContainsKey (IISLogEntry.propClientIPAddress) ? columns[fieldIndex[IISLogEntry.propClientIPAddress]] : String.Empty, UserName = fieldIndex.ContainsKey (IISLogEntry.propUserName) ? columns[fieldIndex[IISLogEntry.propUserName]] : String.Empty, ServiceNameandInstanceNumber = fieldIndex.ContainsKey (IISLogEntry.propServiceNameandInstanceNumber) ? columns[fieldIndex[IISLogEntry.propServiceNameandInstanceNumber]] : String.Empty, ServerName = fieldIndex.ContainsKey (IISLogEntry.propServerName) ? columns[fieldIndex[IISLogEntry.propServerName]] : String.Empty, ServerIPAddress = fieldIndex.ContainsKey (IISLogEntry.propServerIPAddress) ? 
columns[fieldIndex[IISLogEntry.propServerIPAddress]] : String.Empty, ServerPort = fieldIndex.ContainsKey (IISLogEntry.propClientIPAddress) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propServerPort]]) : 0, Method = fieldIndex.ContainsKey (IISLogEntry.propMethod) ? columns[fieldIndex[IISLogEntry.propMethod]] : String.Empty, URIStem = fieldIndex.ContainsKey (IISLogEntry.propURIStem) ? columns[fieldIndex[IISLogEntry.propURIStem]] : String.Empty, URIQuery = fieldIndex.ContainsKey (IISLogEntry.propURIQuery) ? columns[fieldIndex[IISLogEntry.propURIQuery]] : String.Empty, HTTPStatus = fieldIndex.ContainsKey (IISLogEntry.propHTTPStatus) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propHTTPStatus]]) : 0, //Win32Status = fieldIndex.ContainsKey(IISLogEntry.propWin32Status) ? Int32.Parse(row[fieldIndex[IISLogEntry.propWin32Status]]) : 0, BytesSent = fieldIndex.ContainsKey (IISLogEntry.propBytesSent) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propBytesSent]]) : 0, BytesReceived = fieldIndex.ContainsKey (IISLogEntry.propBytesReceived) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propBytesReceived]]) : 0, TimeTaken = fieldIndex.ContainsKey (IISLogEntry.propTimeTaken) ? Int32.Parse (columns[fieldIndex[IISLogEntry.propTimeTaken]]) : 0, ProtocolVersion = fieldIndex.ContainsKey (IISLogEntry.propProtocolVersion) ? columns[fieldIndex[IISLogEntry.propProtocolVersion]] : String.Empty, Host = fieldIndex.ContainsKey (IISLogEntry.propHost) ? columns[fieldIndex[IISLogEntry.propHost]] : String.Empty, UserAgent = fieldIndex.ContainsKey (IISLogEntry.propUserAgent) ? columns[fieldIndex[IISLogEntry.propUserAgent]] : String.Empty, Cookie = fieldIndex.ContainsKey (IISLogEntry.propCookie) ? columns[fieldIndex[IISLogEntry.propCookie]] : String.Empty, Referrer = fieldIndex.ContainsKey (IISLogEntry.propReferrer) ? columns[fieldIndex[IISLogEntry.propReferrer]] : String.Empty, ProtocolSubstatus = fieldIndex.ContainsKey (IISLogEntry.propProtocolSubstatus) ? columns[fieldIndex[IISLogEntry.propProtocolSubstatus]] : String.Empty }; #endregion #region entry processing var url = logEntry.URIStem.ToLower (); #region HTTP status codes & IP if ( httpStatus.ContainsKey (logEntry.HTTPStatus) ) httpStatus[logEntry.HTTPStatus]++; else httpStatus.Add (logEntry.HTTPStatus, 1); if ( !uniqueIPs.Contains (logEntry.ClientIPAddress) ) uniqueIPs.Add (logEntry.ClientIPAddress); #endregion if ( nextTime == DateTime.MinValue ) { firstEntry = logEntry.TimeStamp; lastEntry = logEntry.TimeStamp; nextTime = logEntry.TimeStamp.Date. AddHours (logEntry.TimeStamp.Hour). AddMinutes (logEntry.TimeStamp.Minute). 
AddMinutes (concurrencyWindow); } if ( logEntry.TimeStamp > nextTime ) { if ( processingList.Any () ) { requests.Add (new ConcurrentRequest (concurrencyWindow) { TimeStamp = nextTime, Transactions = processingList.Count, AverageResponseTime = processingList.Average (p => p.TimeTaken), BytesSent = processingList.Sum (t => t.BytesSent) }); processingList.Clear (); } else { requests.Add (new ConcurrentRequest (concurrencyWindow) { TimeStamp = nextTime, Transactions = 0, AverageResponseTime = 0, BytesSent = 0 }); } nextTime = nextTime.AddMinutes (concurrencyWindow); } if ( lastEntry.Hour != logEntry.TimeStamp.Hour ) { totalHours++; AddHourlyPages (pageViewsHourly, hourlyPages, lastEntry); } if ( lastEntry.Date != logEntry.TimeStamp.Date ) { totalDays++; AddDailyPages (pageViewsDaily, dailyPages, lastEntry); } //add the current one to future processing, otherwise one in teh borderlien will be missing if ( logEntry.HTTPStatus == 200 ) { processingList.Add (logEntry); ServedRequests++; if ( pageViewsForPeriod.ContainsKey (url) ) pageViewsForPeriod[url].Hit (logEntry.TimeTaken); else pageViewsForPeriod.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken)); if ( lastEntry.Hour == logEntry.TimeStamp.Hour ) { if ( pageViewsHourly.ContainsKey (url) ) pageViewsHourly[url].Hit (logEntry.TimeTaken); else pageViewsHourly.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken)); } if ( lastEntry.Date == logEntry.TimeStamp.Date ) { if ( pageViewsDaily.ContainsKey (url) ) pageViewsDaily[url].Hit (logEntry.TimeTaken); else pageViewsDaily.Add (url, new MethodInfo (logEntry.URIStem, logEntry.TimeTaken)); } if ( hitsPerURLParams.Any () ) { var urlParam = hitsPerURLParams.Where (p => logEntry.URIQuery.Contains (p)).FirstOrDefault (); if ( urlParam != null && urlParam != String.Empty ) { if ( urlParamHits.ContainsKey (url) ) urlParamHits[url].Hit (logEntry.TimeTaken); else urlParamHits.Add (url, new MethodInfo (urlParam, logEntry.TimeTaken)); } } } lastEntry = logEntry.TimeStamp; } } if ( processingList.Any () ) { requests.Add (new ConcurrentRequest (concurrencyWindow) { TimeStamp = nextTime, Transactions = processingList.Count, AverageResponseTime = processingList.Average (p => p.TimeTaken), BytesSent = processingList.Sum (t => t.BytesSent) }); processingList.Clear (); } AddHourlyPages (pageViewsHourly, hourlyPages, lastEntry); AddDailyPages (pageViewsDaily, dailyPages, lastEntry); #endregion } catch ( Exception e ) { Console.WriteLine ("Error!! {0}:{1} - {2}", e.GetType ().Name, e.Message, e.StackTrace); Debug.WriteLine ("Error!! 
{0}:{1}", e.GetType ().Name, e.Message); } } Console.WriteLine ("\nGenerating Statistics"); #region resultprocessing IEnumerable<MethodInfo> topPages; IEnumerable<IGrouping<DateTime, MethodInfo>> hourlyHits = null; long peakHits; IEnumerable<IGrouping<DateTime, MethodInfo>> peakHourPages = null; try { excelApp = new Application (); excelApp.Visible = false; reportSpreadsheet = excelApp.Workbooks.Add (); excelApp.Calculation = XlCalculation.xlCalculationManual; reportSheet = reportSpreadsheet.ActiveSheet; #region Concurrent Users if ( requests.Any () ) { Console.WriteLine ("{0} Calculating Concurrent User Count", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet.Name = "Concurrent Users"; reportSheet.Cells[reportRow, reportCol++] = "Timestamp"; reportSheet.Cells[reportRow, reportCol++] = "Requests"; reportSheet.Cells[reportRow, reportCol++] = "TPS"; reportSheet.Cells[reportRow, reportCol++] = "Average Response Time"; reportSheet.Cells[reportRow, reportCol++] = "Concurrent Users (based on Little's Law)"; reportSheet.Cells[reportRow, reportCol++] = "Bytes Sent"; reportSheet.Cells[reportRow, reportCol++] = "Network Speed (Mbps)"; foreach ( var p in requests ) { reportCol = 1; reportRow++; reportSheet.Cells[reportRow, reportCol++] = p.TimeStamp; reportSheet.Cells[reportRow, reportCol++] = p.Transactions; reportSheet.Cells[reportRow, reportCol++] = p.Tps; reportSheet.Cells[reportRow, reportCol++] = p.AverageResponseTime; reportSheet.Cells[reportRow, reportCol++] = p.ConcurrentUsers; reportSheet.Cells[reportRow, reportCol++] = p.BytesSent; reportSheet.Cells[reportRow, reportCol++] = p.NetworkSpeed; } } #endregion reportSpreadsheet.Application.DisplayAlerts = false; reportSpreadsheet.SaveAs (exportFileName, ConflictResolution: XlSaveConflictResolution.xlLocalSessionChanges); #region Page visit Summary if ( pageViewsForPeriod.Any () ) { Console.WriteLine ("{0} Genrating Page visit Summary", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1); reportSheet.Name = "Page visit Summary"; startRow = startCol = 1; startRow = CollectionToTable (pageViewsForPeriod.Values, startRow, startCol, "Page visit Summary (for the period)"); reportSheet.Shapes.AddChart (XlChartType.xlLine).Select (); excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:B" + startRow)); reportSheet.Shapes.AddChart (XlChartType.xlPie).Select (); excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:B" + startRow)); excelApp.ActiveChart.ClearToMatchStyle (); try { excelApp.ActiveChart.ChartStyle = 256; } catch ( Exception e ) { } excelApp.ActiveChart.SetElement (Microsoft.Office.Core.MsoChartElementType.msoElementChartTitleAboveChart); excelApp.ActiveChart.ChartTitle.Text = "Page visit Summary (for the period) Most Visited Pages"; reportSheet.Shapes.AddChart (XlChartType.xlBarClustered).Select (); excelApp.ActiveChart.SetSourceData (Source: reportSheet.get_Range ("A1:D" + startRow)); excelApp.ActiveChart.ClearToMatchStyle (); try { excelApp.ActiveChart.ChartStyle = 222; } catch ( Exception e ) { } excelApp.ActiveChart.SetElement (Microsoft.Office.Core.MsoChartElementType.msoElementChartTitleAboveChart); excelApp.ActiveChart.ChartTitle.Text = "Page visit Summary (for the period) Average Response Time"; SpreadCharts (reportSheet); } #endregion #region Daily Analysis if ( dailyPages.Any () ) { Console.WriteLine ("{0} Genrating Daily Statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet = 
reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1); reportSheet.Name = "Daily Analysis"; foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) { filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date) .OrderByDescending (p => p.Hits).Take (topPagesPerDay)); //Debug.WriteLine("Date: {0} - {1}", date, MethodInfo.TotalHits(dailyPages.Where(p => p.Timestamp == d.Date))); } topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2); startRow = startCol = 1; AddChartFromSeries (startRow, startCol, "Daily Top Pages - Visits Trend", topPages, p => p.Hits, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern)); startRow = reportRow + 10; startCol = 1; AddChartFromSeries (startRow, startCol, "Daily Top Pages - Response Time(Average) Trend", topPages, p => p.AvgResponseTime, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern)); startRow = reportRow + 10; startCol = 1; AddChartFromSeries (startRow, startCol, "Daily Top Pages - Response Time(90%tile) Trend", topPages, p => p.NinetiethPercentile, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern)); startRow = 1; startCol = 30; filteredEntries.Clear (); //reportSheet.Cells[reportRow, reportCol] = "Date"; foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) { filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date) .OrderByDescending (p => p.NinetiethPercentile).Take (topPagesPerDay)); } topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2); AddChartFromSeries (startRow, startCol, "Daily Slow Pages - Response Time(90%tile) Trend", topPages, p => p.NinetiethPercentile, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern)); startRow = reportRow + 10; startCol = 30; filteredEntries.Clear (); //reportSheet.Cells[reportRow, reportCol] = "Date"; foreach ( var d in dailyPages.Select (p => p.Timestamp).Distinct () ) { filteredEntries.UnionWith (dailyPages.Where (p => p.Timestamp == d.Date) .OrderByDescending (p => p.AvgResponseTime).Take (topPagesPerDay)); //Debug.WriteLine("Date: {0} - {1}", date, MethodInfo.TotalHits(dailyPages.Where(p => p.Timestamp == d.Date))); } topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalDays / 2); AddChartFromSeries (startRow, startCol, "Daily Slow Pages - Response Time(Average) Trend", topPages, p => p.AvgResponseTime, d => d.ToString (DateTimeFormatInfo.CurrentInfo.ShortDatePattern)); SpreadCharts (reportSheet); } #endregion #region Hourly analysis if ( hourlyPages.Any () ) { Console.WriteLine ("{0} Generating Hourly Statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1); reportSheet.Name = "Hourly Analysis"; startRow = 1; startCol = 1; filteredEntries.Clear (); foreach ( var d in hourlyPages.Select (p => p.Timestamp).Distinct () ) { filteredEntries.UnionWith (hourlyPages.Where (p => p.Timestamp == d.Date.AddHours (d.Hour)) .OrderByDescending (p => p.Hits).Take (topPagesPerDay)); //Debug.WriteLine("Date: {0} - {1}", date, MethodInfo.TotalHits(dailyPages.Where(p => p.Timestamp == d.Date))); } var totalHits = hourlyPages.Sum (p => p.Hits); //keep top pages which are present for at least 10% of the hours or carry at least 2% of the traffic topPages = filteredEntries.Where (p => filteredEntries.Count (q => q.Url == p.Url) > totalHours / 10 || p.Hits > totalHits * 2 / 100); startRow += AddChartFromSeries (startRow, startCol, "Hourly Top Pages Summary (By Hits)", topPages, p => p.Hits, d => d.ToString ()); excelApp.ActiveChart.Axes (XlAxisType.xlCategory).CategoryType = XlCategoryType.xlCategoryScale; hourlyHits = hourlyPages.GroupBy (p => p.Timestamp, q => q); peakHits = hourlyHits.Select (p => p.Sum (q => q.Hits)).OrderByDescending (p => p).Take (peakHoursCount).Min (); peakHourPages = hourlyHits.Where (p => p.Sum (q => q.Hits) >= peakHits); startRow += 10; startCol = 1; startRow += AddChartFromSeries (startRow, startCol, "Peak Hour Top Pages Summary (By Hits)", peakHourPages.SelectMany (g => g.Where (p => p.Hits > peakHits * 2 / 100)), p => p.Hits, d => d.ToString ()); excelApp.ActiveChart.Axes (XlAxisType.xlCategory).CategoryType = XlCategoryType.xlCategoryScale; CollectionToTable (peakHourPages.SelectMany (g => g), startRow + 10, 1, "Peak Hour Pages", true); SpreadCharts (reportSheet); } #endregion #region URL Param Hits Summary if ( hitsPerURLParams.Any () ) { Console.WriteLine ("{0} Generating URL parameter statistics", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet = reportSpreadsheet.Worksheets.Add (Type.Missing, reportSheet, 1); startRow = startCol = 1; reportSheet.Name = "URL Parameters"; CollectionToTable (urlParamHits.Values, startRow, startCol, "URL Parameters Summary (for the period)"); } #endregion #region Summary Console.WriteLine ("{0} Generating Summary", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); reportSheet = reportSpreadsheet.Worksheets.Add (reportSheet, Type.Missing, 1); reportRow = reportCol = 1; reportSheet.Name = "Summary"; reportSheet.Cells[reportRow, 1] = "Running From"; reportSheet.Cells[reportRow++, 2] = curerntPath; reportSheet.Cells[reportRow, 1] = "Commandline Argument"; reportSheet.Cells[reportRow++, 2] = string.Join (";", cmdArgs.Select (x => x.Key + "=" + x.Value)); reportSheet.Cells[reportRow, 1] = "Files Processed"; reportSheet.Cells[reportRow++, 2] = fileCount; reportSheet.Cells[reportRow, 1] = "From"; reportSheet.Cells[reportRow++, 2] = firstEntry; reportSheet.Cells[reportRow, 1] = "To"; reportSheet.Cells[reportRow++, 2] = lastEntry; reportSheet.Cells[reportRow, 1] = "TotalHits"; reportSheet.Cells[reportRow++, 2] = TotalHits; reportSheet.Cells[reportRow, 1] = "Average Transactions/Sec"; reportSheet.Cells[reportRow++, 2] = requests.Average (p => p.Tps); if ( hourlyHits!=null ) { reportSheet.Cells[reportRow, 1] = "Average Transactions/Hour"; reportSheet.Cells[reportRow++, 2] = hourlyHits.Average (p => p.Sum (q => q.Hits)); } if ( peakHourPages!=null ) { reportSheet.Cells[reportRow, 1] = "Peak Hour Transactions/Hour"; reportSheet.Cells[reportRow++, 2] = peakHourPages.Average (p => p.Sum (q => q.Hits)); reportSheet.Cells[reportRow, 1] = "Peak Hour Transactions/Sec"; reportSheet.Cells[reportRow++, 2] = peakHourPages.Average (p => p.Sum (q => q.Hits) / 3600); } reportSheet.Cells[reportRow, 1] = "UniqueIPs"; reportSheet.Cells[reportRow++, 2] = uniqueIPs.Count; reportSheet.Cells[reportRow, 1] = "ServedRequests"; reportSheet.Cells[reportRow++, 2] = ServedRequests; reportRow += 10; reportSheet.Cells[reportRow++, 1] = "Http Status code summary"; reportSheet.Cells[reportRow, 1] = "HTTP Code"; reportSheet.Cells[reportRow++, 2] = "Count"; foreach ( var i in httpStatus ) { reportSheet.Cells[reportRow, reportCol++] = i.Key; reportSheet.Cells[reportRow++, reportCol--] = ( i.Value ); } #endregion } catch ( Exception e ) { Console.WriteLine ("Error!! {0}:{1} - {2}", e.GetType ().Name, e.Message, e.StackTrace); Debug.WriteLine ("Error!! {0}:{1}", e.GetType ().Name, e.Message); } finally { if ( excelApp != null ) { excelApp.Calculation = XlCalculation.xlCalculationAutomatic; if ( reportSpreadsheet != null ) { reportSpreadsheet.Save (); reportSpreadsheet.Close (); excelApp.Quit (); } } File.Delete (tmpFile); stopWatch.Stop (); Console.WriteLine ("Done, final time: {0}", stopWatch.Elapsed.ToString (@"hh\:mm\:ss")); } #endregion return 0; }
internal static ReadOnlyCollection<MethodMockableResult> GetMockableMethods(this Type @this, NameGenerator generator) { var objectMethods = @this.IsInterface ? typeof(object).GetMethods().Where(_ => _.IsExtern() || _.IsVirtual).ToList() : new List<MethodInfo>(); var methods = new HashSet<MockableResult<MethodInfo>>(@this.GetMethods(ReflectionValues.PublicNonPublicInstance) .Where(_ => !_.IsSpecialName && _.IsVirtual && !_.IsFinal && !objectMethods.Where(om => om.Match(_) == MethodMatch.Exact).Any() && _.DeclaringType.Assembly.CanBeSeenByMockAssembly(_.IsPublic, _.IsPrivate, _.IsFamily, _.IsFamilyOrAssembly, generator)) .Select(_ => new MockableResult<MethodInfo>(_, RequiresExplicitInterfaceImplementation.No))); if (@this.IsInterface) { var namespaces = new SortedSet<string>(); foreach (var @interface in @this.GetInterfaces()) { var interfaceMethods = @interface.GetMethods() .Where(_ => !_.IsSpecialName && !objectMethods.Where(om => om.Match(_) == MethodMatch.Exact).Any()); foreach (var interfaceMethod in interfaceMethods) { if (interfaceMethod.CanBeSeenByMockAssembly(generator)) { var matchMethodGroups = methods.GroupBy(_ => interfaceMethod.Match(_.Value)).ToDictionary(_ => _.Key); if (!matchMethodGroups.ContainsKey(MethodMatch.Exact)) { methods.Add(new MockableResult<MethodInfo>( interfaceMethod, matchMethodGroups.ContainsKey(MethodMatch.DifferByReturnTypeOnly) ? RequiresExplicitInterfaceImplementation.Yes : RequiresExplicitInterfaceImplementation.No)); } } } } } var baseStaticMethods = @this.IsInterface ? typeof(object).GetMethods().Where(_ => _.IsStatic).ToList() : @this.GetMethods().Where(_ => _.IsStatic).ToList(); return methods.Select(_ => new MethodMockableResult( _.Value, _.RequiresExplicitInterfaceImplementation, baseStaticMethods.Where(osm => osm.Match(_.Value) == MethodMatch.Exact).Any() ? RequiresIsNewImplementation.Yes : RequiresIsNewImplementation.No)).ToList().AsReadOnly(); }
internal static ReadOnlyCollection<PropertyMockableResult> GetMockableProperties(this Type @this, NameGenerator generator) { var properties = new HashSet<PropertyMockableResult>( from property in @this.GetProperties(ReflectionValues.PublicNonPublicInstance) let canGet = property.CanRead && property.GetMethod.IsVirtual && !property.GetMethod.IsFinal && property.GetMethod.DeclaringType.Assembly.CanBeSeenByMockAssembly( property.GetMethod.IsPublic, property.GetMethod.IsPrivate, property.GetMethod.IsFamily, property.GetMethod.IsFamilyOrAssembly, generator) let canSet = property.CanWrite && property.SetMethod.IsVirtual && !property.SetMethod.IsFinal && property.SetMethod.DeclaringType.Assembly.CanBeSeenByMockAssembly( property.SetMethod.IsPublic, property.SetMethod.IsPrivate, property.SetMethod.IsFamily, property.SetMethod.IsFamilyOrAssembly, generator) where canGet || canSet select new PropertyMockableResult(property, RequiresExplicitInterfaceImplementation.No, (canGet && canSet ? PropertyAccessors.GetAndSet : (canGet ? PropertyAccessors.Get : PropertyAccessors.Set)))); if (@this.IsInterface) { var namespaces = new SortedSet<string>(); foreach (var @interface in @this.GetInterfaces()) { foreach (var interfaceProperty in @interface.GetMockableProperties(generator)) { if (interfaceProperty.Value.GetDefaultMethod().CanBeSeenByMockAssembly(generator)) { var matchMethodGroups = properties.GroupBy(_ => interfaceProperty.Value.GetDefaultMethod().Match(_.Value.GetDefaultMethod())).ToDictionary(_ => _.Key); if (!matchMethodGroups.ContainsKey(MethodMatch.Exact)) { properties.Add(new PropertyMockableResult(interfaceProperty.Value, matchMethodGroups.ContainsKey(MethodMatch.DifferByReturnTypeOnly) ? RequiresExplicitInterfaceImplementation.Yes : RequiresExplicitInterfaceImplementation.No, interfaceProperty.Accessors)); } } } } } return properties.ToList().AsReadOnly(); }
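Both mockable-member scanners above materialize GroupBy(...).ToDictionary(_ => _.Key) solely to probe for the presence of MethodMatch.Exact or MethodMatch.DifferByReturnTypeOnly. ToLookup answers the same membership questions without the ContainsKey dance, since indexing an absent key yields an empty sequence. A small sketch with a stand-in enum (not the original's types):

using System;
using System.Linq;

enum Match { Exact, DifferByReturnTypeOnly, None }

static class LookupSketch
{
    public static void Demo()
    {
        var results = new[] { Match.None, Match.DifferByReturnTypeOnly, Match.None };

        // ILookup returns an empty group for missing keys instead of throwing.
        var byMatch = results.ToLookup(m => m);
        Console.WriteLine(byMatch[Match.Exact].Any());                  // False
        Console.WriteLine(byMatch[Match.DifferByReturnTypeOnly].Any()); // True
    }
}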
private void MarkMessages() { HashSet<MarkRequest> hash; using (_rwLock.GetReadLock()) hash = new HashSet<MarkRequest>(_requests); foreach (var group in hash .GroupBy(rq => rq.IsRead) .Select(grp => new { IsRead = grp.Key, Ids = grp.SelectMany(rq => rq.MsgIds) })) MarkMsgsRead( _provider, group.Ids, group.IsRead); using (_rwLock.GetWriteLock()) _requests.RemoveAll(hash.Contains); foreach (var notificator in hash.Select(rq => rq.MarkFinished)) notificator(); }
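MarkMessages shows a useful concurrency shape: snapshot the shared list into a HashSet under a read lock, do the grouped work lock-free, then prune under a write lock so requests added in the meantime survive. A hedged sketch with ReaderWriterLockSlim (the original's GetReadLock/GetWriteLock helpers are assumed to be thin wrappers around something similar):

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;

class RequestDrainer<T>
{
    private readonly ReaderWriterLockSlim _rwLock = new ReaderWriterLockSlim();
    private readonly List<T> _requests = new List<T>();

    public void Drain(Func<T, bool> key, Action<IGrouping<bool, T>> process)
    {
        // 1. Snapshot under the read lock so writers are blocked only briefly.
        HashSet<T> snapshot;
        _rwLock.EnterReadLock();
        try { snapshot = new HashSet<T>(_requests); }
        finally { _rwLock.ExitReadLock(); }

        // 2. Do the grouped work without holding any lock.
        foreach (var group in snapshot.GroupBy(key))
            process(group);

        // 3. Remove only what was snapshotted; requests added meanwhile survive.
        _rwLock.EnterWriteLock();
        try { _requests.RemoveAll(snapshot.Contains); }
        finally { _rwLock.ExitWriteLock(); }
    }
}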
public static void Main(string[] args) { var list = Utilities.ReadFile(args[0]); var orderedList = list.OrderBy(o => o).ToList(); var regex = new Regex(@"(\d{4})-(\d{2})-(\d{2}) (\d{2}):(\d{2})"); var guardList = new HashSet <Guard>(); Console.WriteLine("Part 1"); Guard currentGuard = null; foreach (string s in orderedList) { var matches = regex.Match(s); if (s.Contains("Guard")) { var splitString = s.Split(' '); currentGuard = new Guard(splitString[3]); guardList.Add(currentGuard); } GroupCollection groups = matches.Groups; var date = new DateTime(Int32.Parse(groups[1].Value), Int32.Parse(groups[2].Value), Int32.Parse(groups[3].Value), Int32.Parse(groups[4].Value), Int32.Parse(groups[5].Value), 0); if (currentGuard != null) { currentGuard.AddSchedule(date.Hour != 0 ? new DateTime(date.Year, date.Month, date.AddDays(1).Day, 0, 0, 0) : date, s.Contains("wakes") || s.Contains("begins")); } } foreach (Guard g in guardList) { var timeAsleep = 0; for (int i = 1; i < g.Schedules.Count; ++i) { if (g.Schedules[i - 1].Awake == false && g.Schedules[i].Awake == true) { timeAsleep += (int)(g.Schedules[i].Time.Subtract(g.Schedules[i - 1].Time).TotalMinutes); } } g.MinutesAsleep = timeAsleep; } var sumGuardList = guardList .GroupBy(g => g.ID) .Select(grp => new string[] { grp.First().ID, grp.Sum(g => g.MinutesAsleep).ToString() } ).OrderByDescending(s => int.Parse(s[1])).ToList(); /* parse before ordering so "9" does not outrank "100" lexicographically */ var maxSleepingGuard = sumGuardList.First(); Console.WriteLine(int.Parse(maxSleepingGuard[0].Replace("#", string.Empty)) * FindMinuteSpentMostAsleep(maxSleepingGuard[0], guardList)); Console.WriteLine("Part 2"); var groupedGuardList = new Dictionary <string, Tuple <int, int> >(); var uniqueIDs = guardList.Select(g => g.ID).Distinct(); Tuple <int, int> current = Tuple.Create(0, 0); foreach (var id in uniqueIDs) { current = FindNumberOfTimesSpentAsleep(guardList.Where(g => g.ID == id).ToList()); if (!groupedGuardList.ContainsKey(id)) { groupedGuardList.Add(id, current); } } var sleepiest = groupedGuardList.OrderByDescending(kv => kv.Value.Item1).First(); /* pick the guard with the highest count instead of Dictionary.First(), which is unordered; assumes Item1 is the times-asleep count and Item2 the minute */ Console.WriteLine(int.Parse(sleepiest.Key.Replace("#", string.Empty)) * sleepiest.Value.Item2); Console.ReadKey(); }
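One pitfall fixed above is worth isolating: OrderByDescending over a stringified number sorts lexicographically, so "9" outranks "100". Keeping the key numeric until display avoids it; a tiny sketch:

using System;
using System.Linq;

static class NumericSortSketch
{
    public static void Demo()
    {
        var totals = new[] { ("#10", 100), ("#99", 9) };

        // Lexicographic: "9" sorts after "100", picking the wrong entry.
        var byText = totals.OrderByDescending(t => t.Item2.ToString()).First();
        Console.WriteLine(byText); // (#99, 9)

        // Numeric: compare ints, format only when printing.
        var byValue = totals.OrderByDescending(t => t.Item2).First();
        Console.WriteLine(byValue); // (#10, 100)
    }
}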
private void Initialize() { if (null == _consumingMethod) throw new InvalidOperationException("Consuming method has not been defined"); if (!_assemblies.Any()) throw new InvalidOperationException("There are no assemblies to scan"); var types = _assemblies .SelectMany(a => { try { return a.GetExportedTypes(); } catch{} return new Type[0]; }).Where(x=>x!=null) .ToList(); var messageTypes = types.Where(x => !x.IsInterface && typeof(IMessage).IsAssignableFrom(x)).ToArray(); var consumerTypes = types.Where(x => typeof(IConsume).IsAssignableFrom(x)).ToArray(); var consumingDirectly = consumerTypes .SelectMany(consumerType => GetConsumedMessages(consumerType) .Select(messageType => new MessageMapping(consumerType, messageType, true))) .ToArray(); var result = new HashSet<MessageMapping>(); foreach (var m in consumingDirectly) { result.Add(m); } var allMessages = result.Select(m => m.Message).ToList(); foreach (var messageType in messageTypes) { if (!allMessages.Contains(messageType)) { allMessages.Add(messageType); result.Add(new MessageMapping(typeof(MessageMapping.BusNull), messageType, true)); } } _consumerInfos = result .GroupBy(x => x.Consumer) .Select(x => { var directs = x .Where(m => m.Direct) .Select(m => m.Message) .Distinct(); var assignables = x .Select(m => m.Message) .Where(t => directs.Any(d => d.IsAssignableFrom(t))) .Distinct(); return new ConsumerInfo(x.Key, assignables.ToArray()); }).ToList(); _messageInfos = result .ToLookup(x => x.Message) .Select(x => { var domainConsumers = x .Where(t => t.Consumer != typeof(MessageMapping.BusNull)) .ToArray(); return new MessageInfo { MessageType = x.Key, AllConsumers = domainConsumers.Select(m => m.Consumer).Distinct().ToArray(), DerivedConsumers = domainConsumers.Where(m => !m.Direct).Select(m => m.Consumer).Distinct().ToArray(), DirectConsumers = domainConsumers.Where(m => m.Direct).Select(m => m.Consumer).Distinct().ToArray(), }; }).ToList(); var includedTypes = _messageInfos .Select(m => m.MessageType).ToList(); // message directory should still include all messages for the serializers var orphanedMessages = result .Where(m => !includedTypes.Contains(m.Message)) .Select(m => new MessageInfo { MessageType = m.Message, AllConsumers = Type.EmptyTypes, DerivedConsumers = Type.EmptyTypes, DirectConsumers = Type.EmptyTypes }); _messageInfos.AddRange(orphanedMessages); }
protected virtual IEnumerable <AuthorizationToken> GetAuthorizedTokens(string @namespace, uint memberId, MemberType memberType) { var conditions = Condition.Equal("MemberId", memberId) & Condition.Equal("MemberType", memberType); //get all ancestor roles of the specified member, along with the hierarchy levels of those roles if (MembershipHelper.GetAncestors(this.DataAccess, @namespace, memberId, memberType, out var flats, out var hierarchies) > 0) { //if the member has ancestor roles, the permission query condition must also include all of those roles conditions = ConditionCollection.Or( conditions, Condition.In("MemberId", flats.Select(p => p.RoleId)) & Condition.Equal("MemberType", MemberType.Role) ); } //get all permission definitions matching the condition (note: paging is disabled and results are loaded eagerly into an array) var permissions = this.DataAccess.Select <Permission>(conditions, Paging.Disabled).ToArray(); //get all permission filter definitions matching the condition (note: paging is disabled and results are loaded eagerly into an array) var permissionFilters = this.DataAccess.Select <PermissionFilter>(conditions, Paging.Disabled).ToArray(); var states = new HashSet <AuthorizationState>(); IEnumerable <Permission> prepares; IEnumerable <AuthorizationState> grants, denies; //if the ancestor hierarchy list is not empty, filter level by level if (hierarchies != null && hierarchies.Count > 0) { //walk from the topmost level (the one farthest from the member) down to the bottom level (the one closest to the member) for (int i = hierarchies.Count - 1; i >= 0; i--) { //permission filter condition: all permission definitions of the roles at the current level prepares = permissions.Where(p => hierarchies[i].Any(role => role.RoleId == p.MemberId) && p.MemberType == MemberType.Role); grants = prepares.Where(p => p.Granted).Select(p => new AuthorizationState(p.SchemaId, p.ActionId)).ToArray(); denies = prepares.Where(p => !p.Granted).Select(p => new AuthorizationState(p.SchemaId, p.ActionId)).ToArray(); states.UnionWith(grants); //merge the granted permission definitions states.ExceptWith(denies); //remove the denied permission definitions //update the filter text of the affected targets in the authorization set this.SetPermissionFilters(states, permissionFilters.Where(p => hierarchies[i].Any(role => role.RoleId == p.MemberId) && p.MemberType == MemberType.Role)); } } //find the member's own entries among the permission definitions prepares = permissions.Where(p => p.MemberId == memberId && p.MemberType == memberType); grants = prepares.Where(p => p.Granted).Select(p => new AuthorizationState(p.SchemaId, p.ActionId)).ToArray(); denies = prepares.Where(p => !p.Granted).Select(p => new AuthorizationState(p.SchemaId, p.ActionId)).ToArray(); states.UnionWith(grants); //merge the granted permission definitions states.ExceptWith(denies); //remove the denied permission definitions //update the filter text of the affected targets in the authorization set this.SetPermissionFilters(states, permissionFilters.Where(p => p.MemberId == memberId && p.MemberType == memberType)); foreach (var group in states.GroupBy(p => p.SchemaId)) { yield return(new AuthorizationToken(group.Key, group.Select(p => new AuthorizationToken.ActionToken(p.ActionId, p.Filter)))); } }
public override HashSet <UserData> GetUserData() { using (Nop280DataContext context = GetContext()) { HashSet <Nop280_Customer> customers = context.Nop280_Customers.Where(x => !x.Deleted && x.Email != null && x.Password != null).ToHashSet(); HashSet <Nop280_GenericAttribute> attributes = context.Nop280_GenericAttributes.Where(attribute => attribute.KeyGroup == "Customer").ToHashSet(); Dictionary <int, HashSet <Nop280_GenericAttribute> > attributeUserDictionary = attributes.GroupBy( x => x.EntityId) .ToDictionary(grouping => grouping.Key, grouping => grouping.ToHashSet()); HashSet <Nop280_CustomerAddress> addresses = context.Nop280_CustomerAddresses.ToHashSet(); var userDatas = new HashSet <UserData>(); foreach (Nop280_Customer customer in customers) { Nop280_Customer thisCustomer = customer; Dictionary <string, string> customerAttributes = attributeUserDictionary.ContainsKey(customer.Id) ? attributeUserDictionary[customer.Id] .GroupBy(attribute => attribute.Key) .ToDictionary(grouping => grouping.Key, attribute => attribute.First().Value) : new Dictionary <string, string>(); HashSet <Nop280_Address> customerAddresses = addresses.FindAll(x => x.Customer_Id == thisCustomer.Id) .Select(address => address.Nop280_Address) .ToHashSet(); userDatas.Add(new UserData { Id = thisCustomer.Id, Email = thisCustomer.Email, Salt = thisCustomer.PasswordSalt, Hash = thisCustomer.Password, Active = thisCustomer.Active, Format = "NopSHA1", Guid = thisCustomer.CustomerGuid, FirstName = customerAttributes.ContainsKey(FirstNameKey) ? customerAttributes[FirstNameKey] : string.Empty, LastName = customerAttributes.ContainsKey(LastNameKey) ? customerAttributes[LastNameKey] : string.Empty, AddressData = customerAddresses.Select(GetAddressDataObject).ToHashSet() }); } return(userDatas); } }
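GetUserData collapses generic attributes twice: first grouped per customer id, then per attribute key with attribute.First().Value winning on duplicates. That GroupBy + First() step is what keeps ToDictionary from throwing ArgumentException on a duplicate key; a sketch with hypothetical names:

using System.Collections.Generic;
using System.Linq;

// Hypothetical stand-in for a generic attribute row.
record Attr(int EntityId, string Key, string Value);

static class AttrSketch
{
    public static Dictionary<string, string> ForEntity(IEnumerable<Attr> attrs, int entityId) =>
        attrs.Where(a => a.EntityId == entityId)
             .GroupBy(a => a.Key)
             // First-wins on duplicate keys, mirroring the original's attribute.First().Value.
             .ToDictionary(g => g.Key, g => g.First().Value);
}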
/// <summary> /// Generates a syntax tree for the provided assemblies. /// </summary> /// <param name="targetAssembly">The assembly used for accessibility checks, or <see langword="null"/> during runtime code generation.</param> /// <param name="assemblies">The assemblies to generate code for.</param> /// <returns>The generated syntax tree.</returns> private GeneratedSyntax GenerateCode(Assembly targetAssembly, List <Assembly> assemblies) { var features = new FeatureDescriptions(); var members = new List <MemberDeclarationSyntax>(); // Expand the list of included assemblies and types. var knownAssemblies = new Dictionary <Assembly, KnownAssemblyAttribute>( assemblies.ToDictionary(k => k, k => default(KnownAssemblyAttribute))); foreach (var attribute in assemblies.SelectMany(asm => asm.GetCustomAttributes <KnownAssemblyAttribute>())) { knownAssemblies[attribute.Assembly] = attribute; } if (logger.IsEnabled(LogLevel.Information)) { logger.Info($"Generating code for assemblies: {string.Join(", ", knownAssemblies.Keys.Select(a => a.FullName))}"); } // Get types from assemblies which reference Orleans and are not generated assemblies. var grainClasses = new HashSet <Type>(); var grainInterfaces = new HashSet <Type>(); foreach (var pair in knownAssemblies) { var assembly = pair.Key; var treatTypesAsSerializable = pair.Value?.TreatTypesAsSerializable ?? false; foreach (var type in TypeUtils.GetDefinedTypes(assembly, this.logger)) { if (treatTypesAsSerializable || type.IsSerializable || TypeHasKnownBase(type)) { string logContext = null; if (logger.IsEnabled(LogLevel.Trace)) { if (treatTypesAsSerializable) { logContext = $"known assembly {assembly.GetName().Name} where 'TreatTypesAsSerializable' = true"; } else if (type.IsSerializable) { logContext = $"known assembly {assembly.GetName().Name} where type is [Serializable]"; } else if (TypeHasKnownBase(type)) { logContext = $"known assembly {assembly.GetName().Name} where type has known base type."; } } serializableTypes.RecordType(type, targetAssembly, logContext); } // Include grain interfaces and classes. var isGrainInterface = GrainInterfaceUtils.IsGrainInterface(type); var isGrainClass = TypeUtils.IsConcreteGrainClass(type); if (isGrainInterface || isGrainClass) { // If code generation is being performed at runtime, the interface must be accessible to the generated code. if (!TypeUtilities.IsAccessibleFromAssembly(type, targetAssembly)) { if (this.logger.IsEnabled(LogLevel.Debug)) { this.logger.Debug("Skipping inaccessible grain type, {0}", type.GetParseableName()); } continue; } // Attempt to generate serializers for grain state classes, i.e., T in Grain<T>. var baseType = type.BaseType; if (baseType != null && baseType.IsConstructedGenericType) { foreach (var arg in baseType.GetGenericArguments()) { string logContext = null; if (logger.IsEnabled(LogLevel.Trace)) { logContext = "generic base type of " + type.GetLogFormat(); } this.serializableTypes.RecordType(arg, targetAssembly, logContext); } } // Skip classes generated by this generator.
if (IsOrleansGeneratedCode(type)) { if (this.logger.IsEnabled(LogLevel.Debug)) { this.logger.Debug("Skipping generated grain type, {0}", type.GetParseableName()); } continue; } if (this.knownGrainTypes.Contains(type)) { if (this.logger.IsEnabled(LogLevel.Debug)) { this.logger.Debug("Skipping grain type {0} since it already has generated code.", type.GetParseableName()); } continue; } if (isGrainClass) { if (this.logger.IsEnabled(LogLevel.Information)) { this.logger.Info("Found grain implementation class: {0}", type.GetParseableName()); } grainClasses.Add(type); } if (isGrainInterface) { if (this.logger.IsEnabled(LogLevel.Information)) { this.logger.Info("Found grain interface: {0}", type.GetParseableName()); } GrainInterfaceUtils.ValidateInterfaceRules(type); grainInterfaces.Add(type); } } } } // Group the types by namespace and generate the required code in each namespace. foreach (var groupedGrainInterfaces in grainInterfaces.GroupBy(_ => CodeGeneratorCommon.GetGeneratedNamespace(_))) { var namespaceName = groupedGrainInterfaces.Key; var namespaceMembers = new List <MemberDeclarationSyntax>(); foreach (var grainInterface in groupedGrainInterfaces) { var referenceTypeName = GrainReferenceGenerator.GetGeneratedClassName(grainInterface); var invokerTypeName = GrainMethodInvokerGenerator.GetGeneratedClassName(grainInterface); namespaceMembers.Add( GrainReferenceGenerator.GenerateClass( grainInterface, referenceTypeName, encounteredType => { string logContext = null; if (logger.IsEnabled(LogLevel.Trace)) { logContext = "used by grain type " + grainInterface.GetLogFormat(); } this.serializableTypes.RecordType(encounteredType, targetAssembly, logContext); })); namespaceMembers.Add(GrainMethodInvokerGenerator.GenerateClass(grainInterface, invokerTypeName)); var genericTypeSuffix = GetGenericTypeSuffix(grainInterface.GetGenericArguments().Length); features.GrainInterfaces.Add( new GrainInterfaceDescription { Interface = grainInterface.GetTypeSyntax(includeGenericParameters: false), Reference = SF.ParseTypeName(namespaceName + '.' + referenceTypeName + genericTypeSuffix), Invoker = SF.ParseTypeName(namespaceName + '.' + invokerTypeName + genericTypeSuffix), InterfaceId = GrainInterfaceUtils.GetGrainInterfaceId(grainInterface) }); } members.Add(CreateNamespace(namespaceName, namespaceMembers)); } foreach (var type in grainClasses) { features.GrainClasses.Add( new GrainClassDescription { ClassType = type.GetTypeSyntax(includeGenericParameters: false) }); } // Generate serializers into their own namespace. var serializerNamespace = this.GenerateSerializers(targetAssembly, features); members.Add(serializerNamespace); // Add serialization metadata for the types which were encountered. this.AddSerializationTypes(features.Serializers, targetAssembly, knownAssemblies.Keys.ToList()); foreach (var attribute in knownAssemblies.Keys.SelectMany(asm => asm.GetCustomAttributes <ConsiderForCodeGenerationAttribute>())) { this.serializableTypes.RecordType(attribute.Type, targetAssembly, "[ConsiderForCodeGeneration]"); if (attribute.ThrowOnFailure && !this.serializableTypes.IsTypeRecorded(attribute.Type) && !this.serializableTypes.IsTypeIgnored(attribute.Type)) { throw new CodeGenerationException( $"Found {attribute.GetType().Name} for type {attribute.Type.GetParseableName()}, but code" + " could not be generated. Ensure that the type is accessible."); } } // Generate metadata directives for all of the relevant types. 
var(attributeDeclarations, memberDeclarations) = FeaturePopulatorGenerator.GenerateSyntax(targetAssembly, features); members.AddRange(memberDeclarations); var compilationUnit = SF.CompilationUnit().AddAttributeLists(attributeDeclarations.ToArray()).AddMembers(members.ToArray()); return(new GeneratedSyntax { SourceAssemblies = knownAssemblies.Keys.ToList(), Syntax = compilationUnit }); }
public async Task AnalyzeProjectAsync(Project project, bool semanticsChanged, InvocationReasons reasons, CancellationToken cancellationToken) { lock (_reported) { // to make sure that we don't report while the solution loads, we use this heuristic. // if the reason we are called is "document being added" to a project, we wait for the next analyze call. // also, we only report usage information per project once. // this telemetry only tells us which APIs were ever used; it does not track how often an API is used, and the data is an approximation, not precise information. we don't care much how many times APIs are used within the same solution; we are more interested in the number of solutions or users that use an API. if (reasons.Contains(PredefinedInvocationReasons.DocumentAdded) || _reported.Contains(project.Id)) { return; } } // if this project has cross-language p2p references, then pass in the solution; otherwise, don't pass it in, // since checking whether a symbol is a cross-language symbol is expensive and // we know that the population of solutions with both C# and VB is very tiny. // so no reason to pay the cost for common cases. var crossLanguageSolutionOpt = project.ProjectReferences.Any(p => project.Solution.GetProject(p.ProjectId)?.Language != project.Language) ? project.Solution : null; var metadataSymbolUsed = new HashSet <ISymbol>(SymbolEqualityComparer.Default); foreach (var document in project.Documents) { var root = await document.GetSyntaxRootAsync(cancellationToken).ConfigureAwait(false); var model = await document.GetSemanticModelAsync(cancellationToken).ConfigureAwait(false); foreach (var operation in GetOperations(model, cancellationToken)) { cancellationToken.ThrowIfCancellationRequested(); if (metadataSymbolUsed.Count > Max) { // collect data up to max per project break; } // this only gathers reference and method-call symbols, not types being used. // if we want all metadata types used, we need to add more cases, // which will make things more expensive. CollectApisUsed(operation, crossLanguageSolutionOpt, metadataSymbolUsed, cancellationToken); } } var groupByAssembly = metadataSymbolUsed.GroupBy(symbol => symbol.ContainingAssembly); var apiPerAssembly = groupByAssembly.Select(assemblyGroup => new { // mark all string as PII (customer data) AssemblyName = new TelemetryPiiProperty(assemblyGroup.Key.Identity.Name), AssemblyVersion = assemblyGroup.Key.Identity.Version.ToString(), Namespaces = assemblyGroup.GroupBy(symbol => symbol.ContainingNamespace) .Select(namespaceGroup => { var namespaceName = namespaceGroup.Key?.ToString() ?? string.Empty; return(new { Namespace = new TelemetryPiiProperty(namespaceName), Symbols = namespaceGroup.Select(symbol => symbol.GetDocumentationCommentId()) .Where(id => id != null) .Select(id => new TelemetryPiiProperty(id)) }); }) }); lock (_reported) { if (_reported.Add(project.Id)) { var solutionSessionId = project.Solution.State.SolutionAttributes.TelemetryId.ToString("B"); var projectGuid = project.State.ProjectInfo.Attributes.TelemetryId.ToString("B"); // use telemetry API directly rather than Logger abstraction for PII data var telemetryEvent = new TelemetryEvent(EventName); telemetryEvent.Properties[ApiPropertyName] = new TelemetryComplexProperty(apiPerAssembly); telemetryEvent.Properties[SessionIdPropertyName] = new TelemetryPiiProperty(solutionSessionId); telemetryEvent.Properties[ProjectIdPropertyName] = new TelemetryPiiProperty(projectGuid); try { RoslynServices.TelemetrySession?.PostEvent(telemetryEvent); } catch { // don't crash OOP because we failed to send telemetry } } } return; }
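The telemetry walker above turns a flat HashSet<ISymbol> into a two-level shape (assembly, then namespace, then documentation ids) using nothing but nested GroupBy. The same projection works on plain data; a minimal sketch with hypothetical fields:

using System.Collections.Generic;
using System.Linq;

// Hypothetical flattened record of one API use.
record ApiUse(string Assembly, string Namespace, string DocId);

static class ApiReportSketch
{
    public static object Shape(HashSet<ApiUse> used) =>
        used.GroupBy(u => u.Assembly)
            .Select(asm => new
            {
                Assembly = asm.Key,
                Namespaces = asm.GroupBy(u => u.Namespace)
                                .Select(ns => new { Namespace = ns.Key, Ids = ns.Select(u => u.DocId) })
            });
}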
private void BindVehicles(IMobileSpaceObject selected = null) { var vehicles = new HashSet <IVehicle>(); // find vehicles in sector that are not fleets foreach (var v in sector.SpaceObjects.OfType <SpaceVehicle>().OwnedBy(Empire.Current)) { vehicles.Add(v); } // add vehicles that are being removed from fleets (but not fleets themselves, those go in the fleets tree) foreach (var v in newCommands.OfType <LeaveFleetCommand>().Select(c => c.Executor).OfType <SpaceVehicle>()) { vehicles.Add(v); } foreach (var v in newCommands.OfType <DisbandFleetCommand>().SelectMany(c => c.Executor.Vehicles.OfType <SpaceVehicle>())) { vehicles.Add(v); } // remove vehicles that are being added to fleets foreach (var v in newCommands.OfType <JoinFleetCommand>().Select(c => c.Executor).OfType <SpaceVehicle>()) { vehicles.Remove(v); } // make a tree of vehicles treeVehicles.Initialize(32); foreach (var vtGroup in vehicles.GroupBy(v => v.Design.VehicleType)) { var vtNode = treeVehicles.AddItemWithImage(vtGroup.Key.ToSpacedString(), vtGroup.Key, Pictures.GetVehicleTypeImage(Empire.Current.ShipsetPath, vtGroup.Key)); foreach (var roleGroup in vtGroup.GroupBy(v => v.Design.Role)) { var roleNode = vtNode.AddItemWithImage(roleGroup.Key, roleGroup.Key, Pictures.GetVehicleTypeImage(Empire.Current.ShipsetPath, vtGroup.Key)); foreach (var designGroup in roleGroup.GroupBy(v => v.Design)) { var designNode = roleNode.AddItemWithImage(designGroup.Key.Name, designGroup.Key, designGroup.Key.Icon); foreach (var vehicle in designGroup) { TreeNode vehicleNode; if (vehicle is IMobileSpaceObject sobj) // yay pattern matching! :D { vehicleNode = designNode.AddItemWithImage(vehicle.Name + ": " + CalculateStatus(sobj), vehicle, vehicle.Icon); } else { vehicleNode = designNode.AddItemWithImage(vehicle.Name, vehicle, vehicle.Icon); } if (vehicle == selected) { treeVehicles.SelectedNode = vehicleNode; } } } } if (vtNode.Nodes.Count == 0) { vtNode.Remove(); } } // expand the treeeee! treeVehicles.ExpandAll(); }
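BindVehicles builds its tree from three nested GroupBy levels (vehicle type, then role, then design). Stripped of the UI calls, the traversal is just nested foreach over groupings; a sketch with hypothetical fields:

using System;
using System.Collections.Generic;
using System.Linq;

record Vehicle(string Type, string Role, string Design, string Name);

static class VehicleTreeSketch
{
    public static void Print(HashSet<Vehicle> vehicles)
    {
        foreach (var byType in vehicles.GroupBy(v => v.Type))
        {
            Console.WriteLine(byType.Key);
            foreach (var byRole in byType.GroupBy(v => v.Role))
            {
                Console.WriteLine("  " + byRole.Key);
                foreach (var byDesign in byRole.GroupBy(v => v.Design))
                {
                    Console.WriteLine("    " + byDesign.Key);
                    foreach (var v in byDesign)
                        Console.WriteLine("      " + v.Name);
                }
            }
        }
    }
}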
/// <summary> /// Gets the Urls of the content item. /// </summary> /// <remarks> /// <para>Use when displaying Urls. If errors occur when generating the Urls, they will show in the list.</para> /// <para>Contains all the Urls that we can figure out (based upon domains, etc).</para> /// </remarks> public static IEnumerable <UrlInfo> GetContentUrls(this IContent content, IPublishedRouter publishedRouter, UmbracoContext umbracoContext, ILocalizationService localizationService, ILocalizedTextService textService, IContentService contentService, ILogger logger) { if (content == null) { throw new ArgumentNullException(nameof(content)); } if (publishedRouter == null) { throw new ArgumentNullException(nameof(publishedRouter)); } if (umbracoContext == null) { throw new ArgumentNullException(nameof(umbracoContext)); } if (localizationService == null) { throw new ArgumentNullException(nameof(localizationService)); } if (textService == null) { throw new ArgumentNullException(nameof(textService)); } if (contentService == null) { throw new ArgumentNullException(nameof(contentService)); } if (logger == null) { throw new ArgumentNullException(nameof(logger)); } if (content.Published == false) { yield return(UrlInfo.Message(textService.Localize("content/itemNotPublished"))); yield break; } // build a list of urls, for the back-office // which will contain // - the 'main' urls, which is what .Url would return, for each culture // - the 'other' urls we know (based upon domains, etc) // // need to work through each installed culture: // on invariant nodes, each culture returns the same url segment but, // we don't know if the branch to this content is invariant, so we need to ask // for URLs for all cultures. // and, not only for those assigned to domains in the branch, because we want // to show what GetUrl() would return, for every culture. var urls = new HashSet <UrlInfo>(); var cultures = localizationService.GetAllLanguages().Select(x => x.IsoCode).ToList(); //get all URLs for all cultures //in a HashSet, so de-duplicates too foreach (var cultureUrl in GetContentUrlsByCulture(content, cultures, publishedRouter, umbracoContext, contentService, textService, logger)) { urls.Add(cultureUrl); } //return the real urls first, then the messages foreach (var urlGroup in urls.GroupBy(x => x.IsUrl).OrderByDescending(x => x.Key)) { //in some cases there will be the same URL for multiple cultures: // * The entire branch is invariant // * If there are less domain/cultures assigned to the branch than the number of cultures/languages installed foreach (var dUrl in urlGroup.DistinctBy(x => x.Text.ToUpperInvariant()).OrderBy(x => x.Text).ThenBy(x => x.Culture)) { yield return(dUrl); } } // get the 'other' urls - ie not what you'd get with GetUrl() but urls that would route to the document, nevertheless. // for these 'other' urls, we don't check whether they are routable, collide, anything - we just report them. foreach (var otherUrl in umbracoContext.UrlProvider.GetOtherUrls(content.Id).OrderBy(x => x.Text).ThenBy(x => x.Culture)) { if (urls.Add(otherUrl)) //avoid duplicates { yield return(otherUrl); } } }
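GetContentUrls leans on a DistinctBy extension. Before .NET 6 added Enumerable.DistinctBy, projects shipped their own; a hedged sketch of the usual HashSet-based implementation (the Umbraco original may differ in detail):

using System;
using System.Collections.Generic;

static class DistinctBySketch
{
    // First item wins for each key, matching the behavior of .NET 6's Enumerable.DistinctBy.
    public static IEnumerable<TSource> DistinctBy<TSource, TKey>(
        this IEnumerable<TSource> source, Func<TSource, TKey> keySelector)
    {
        var seen = new HashSet<TKey>();
        foreach (var item in source)
            if (seen.Add(keySelector(item)))  // HashSet.Add reports whether the key was new
                yield return item;
    }
}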
public override string Part2() { //Strategy 2: Of all guards, which guard is most frequently asleep on the same minute? //In the example above, Guard #99 spent minute 45 asleep more than any other guard or minute - three times in total. //(In all other cases, any guard spent any minute asleep at most twice.) //What is the ID of the guard you chose multiplied by the minute you chose ? (In the above example, the answer would be 99 * 45 = 4455.) var shifts = PuzzleInput.Split(new[] { "\r\n" }, StringSplitOptions.None).Select(c => c.ToCharArray()).Select(c => new Shift() { TimeOfEvent = Convert.ToDateTime(new string(c.Skip(1).Take(16).ToArray())), ShiftEvent = GetShiftEvent(c.Skip(19).First()), GuardOnDuty = GetShiftEvent(c.Skip(19).First()) == ShiftEvent.Start ? new Guard() { ID = Convert.ToInt32(new string(c.SkipWhile(d => d != '#').Skip(1).TakeWhile(d => d != ' ').ToArray())) } : null }); //Order shifts shifts = shifts.OrderBy(c => c.TimeOfEvent); var allGuardsShifts = new HashSet <Guard>(); Guard guard = null; foreach (var shift in shifts) { switch (shift.ShiftEvent) { case ShiftEvent.Start: if (guard != null) { allGuardsShifts.Add(guard); } guard = shift.GuardOnDuty; guard.SleepPeriods = new HashSet <Period>(); break; case ShiftEvent.WakesUp: guard.SleepPeriods.First(c => !c.endTime.HasValue).endTime = shift.TimeOfEvent.AddMinutes(0); break; case ShiftEvent.FallsAsleep: guard.SleepPeriods.Add(new Period() { startTime = shift.TimeOfEvent }); break; default: throw new NotSupportedException("Event does not exist"); } } allGuardsShifts.Add(guard); //Group periods per Guard var guardsMostOccuentMinuteAsleep = allGuardsShifts .GroupBy(c => c.ID) .Select(c => new { ID = c.Key, SleepPeriods = c.SelectMany(d => d.SleepPeriods).ToHashSet() }) .Select(c => new { guardID = c.ID, sleepMinutes = MostOccurentMinute(c.SleepPeriods) }).ToHashSet(); var guardMostAsleep = guardsMostOccuentMinuteAsleep .Where(c => c.sleepMinutes.Item2 == guardsMostOccuentMinuteAsleep.Max(d => d.sleepMinutes.Item2)) .Select(c => new { sleepMinute = c.sleepMinutes.Item1, c.guardID }).First(); return((guardMostAsleep.guardID * guardMostAsleep.sleepMinute).ToString()); }
private static int RunMapCommand(Options options) { _time = Stopwatch.StartNew(); bool isInteractiveMode = false; if (!options.Loaded) { return(-1); } // Parameter Validation try { if (options.LimitX != null) { var splittedLimit = options.LimitX.Split(',').Select(x => Convert.ToInt32(x)).OrderBy(x => x).ToArray(); if (splittedLimit.Length != 2) { throw new ArgumentOutOfRangeException("LimitX"); } options.LimitXLow = splittedLimit[0]; options.LimitXHigh = splittedLimit[1]; } } catch (Exception) { Console.WriteLine( $"The value '{options.LimitX}' for the LimitX parameter is not valid. Try something like -10,10"); return(-1); } try { if (options.LimitZ != null) { var splittedLimit = options.LimitZ.Split(',').Select(x => Convert.ToInt32(x)).OrderBy(x => x).ToArray(); if (splittedLimit.Length != 2) { throw new ArgumentOutOfRangeException("LimitZ"); } options.LimitZLow = splittedLimit[0]; options.LimitZHigh = splittedLimit[1]; } } catch (Exception) { Console.WriteLine( $"The value '{options.LimitZ}' for the LimitZ parameter is not valid. Try something like -10,10"); return(-1); } options.FileFormat = options.FileFormat.ToLowerInvariant(); if (new string[] { "jpg", "png", "webp", "none" }.All(x => x != options.FileFormat)) { Console.WriteLine($"The value {options.FileFormat} is not allowed for option -f"); return(-1); } if (String.IsNullOrEmpty(options.MinecraftWorld)) { if (InteractiveMode(options)) { isInteractiveMode = true; } else { return(-1); } } var world = new World(); try { Console.WriteLine("Opening world..."); world.Open(options.MinecraftWorld); } catch (Exception ex) { Console.WriteLine($"Could not open world at '{options.MinecraftWorld}'!. Did you specify the .../db folder?"); Console.WriteLine("The reason was:"); Console.WriteLine(ex.Message); return(-1); } // Start Generation if (options.RenderMap) { int xmin = 0; int xmax = 0; int zmin = 0; int zmax = 0; HashSet <LevelDbWorldKey2> allSubChunks = null; Func <LevelDbWorldKey2, bool> constraintX = b => true; Func <LevelDbWorldKey2, bool> constraintZ = b => true; if (options.LimitXLow.HasValue && options.LimitXHigh.HasValue) { xmin = options.LimitXLow.Value; xmax = options.LimitXHigh.Value; Console.WriteLine($"Limiting X to {xmin} to {xmax}"); var xmax1 = xmax; var xmin1 = xmin; constraintX = key => key.X >= xmin1 && key.X <= xmax1; } if (options.LimitZLow.HasValue && options.LimitZHigh.HasValue) { zmin = options.LimitZLow.Value; zmax = options.LimitZHigh.Value; Console.WriteLine($"Limiting Z to {zmin} to {zmax}"); var zmax1 = zmax; var zmin1 = zmin; constraintZ = key => key.Z >= zmin1 && key.Z <= zmax1; } if (options.Dimension == 1 && options.NoAutoTrimCeiling == false) { // Nether options.TrimCeiling = true; if (options.LimitY == -1) { options.LimitY = 120; } } Console.WriteLine("Generating a list of all chunk keys in the database.\nThis could take a few minutes"); var keys = world.GetDimension(options.Dimension).ToList(); allSubChunks = Enumerable.ToHashSet(keys.Select(x => new LevelDbWorldKey2(x)) .Where(k => constraintX(k) && constraintZ(k))); _totalChunk = allSubChunks.GroupBy(x => x.XZ).Count(); GC.Collect(); GC.WaitForPendingFinalizers(); Console.WriteLine($"Total Chunk count {_totalChunk}"); Console.WriteLine(); xmin = allSubChunks.Min(x => x.X); xmax = allSubChunks.Max(x => x.X); zmin = allSubChunks.Min(x => x.Z); zmax = allSubChunks.Max(x => x.Z); Console.WriteLine($"The total dimensions of the map are"); Console.WriteLine($" X: {xmin} to {xmax}"); Console.WriteLine($" Z: {zmin} to {zmax}"); Console.WriteLine(); if 
(options.LimitY > 0) { Console.WriteLine($"Limiting Y to {options.LimitY}"); } const int chunkSize = 256; int chunksPerDimension = options.ChunksPerDimension; int tileSize = chunkSize * chunksPerDimension; Console.WriteLine($"Tilesize is {tileSize}x{tileSize}"); if (String.IsNullOrEmpty(options.OutputPath)) { options.OutputPath = Path.Combine("generatedmaps", world.WorldName); Console.WriteLine($"Output folder not specified, defaulting to {options.OutputPath}"); } if (!Directory.Exists(options.OutputPath)) { Directory.CreateDirectory(options.OutputPath); } // db stuff var textures = ReadTerrainTextureJson(); var zoom = CalculateZoom(xmax, xmin, zmax, zmin, chunksPerDimension, out var extendedDia); var strat = InstanciateStrategy(options); ConfigureStrategy(strat, options, allSubChunks, extendedDia, zoom, world, textures, tileSize, chunkSize, zmin, zmax, xmin, xmax); strat.Init(); // other stuff strat.RenderInitialLevel(); var missingTextures = strat.MissingTextures; if (missingTextures != null) { File.WriteAllLines("missingtextures.txt", missingTextures.Distinct()); } Console.WriteLine("Time is {0}", _time.Elapsed); strat.RenderZoomLevels(); var output = new OpenLayers(); output.OutputMap(tileSize, options.OutputPath, options.MapHtml, strat.GetSettings(), strat.IsUpdate, options.ShowPlayerIcons, world); strat.Finish(); Console.WriteLine("Total Time {0}", _time.Elapsed); } Console.WriteLine("Map generation finished!"); if (options.OutputPath.Length >= 2 && options.OutputPath[1] == ':') // absolute path { Console.WriteLine($"Your map is at {options.OutputPath}"); } else { Console.WriteLine(); Console.WriteLine(); Console.WriteLine($"Your map is at {Path.Combine(Environment.CurrentDirectory, options.OutputPath)}"); Console.WriteLine(); Console.WriteLine(); } if (isInteractiveMode) { Console.WriteLine("Press enter to close this window!"); Console.ReadLine(); } world.Close(); return(0); }
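A small aside from the map renderer: allSubChunks.GroupBy(x => x.XZ).Count() only counts distinct keys, which Select(...).Distinct().Count() states more directly without building the groups. A tiny sketch:

using System;
using System.Linq;

static class ChunkCountSketch
{
    public static void Demo()
    {
        var subChunks = new[] { (XZ: 1L, Y: 0), (XZ: 1L, Y: 1), (XZ: 2L, Y: 0) };

        Console.WriteLine(subChunks.GroupBy(c => c.XZ).Count());            // 2
        Console.WriteLine(subChunks.Select(c => c.XZ).Distinct().Count());  // 2
    }
}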
public override string Part1() { var shifts = PuzzleInput.Split(new[] { "\r\n" }, StringSplitOptions.None).Select(c => c.ToCharArray()).Select(c => new Shift() { TimeOfEvent = Convert.ToDateTime(new string(c.Skip(1).Take(16).ToArray())), ShiftEvent = GetShiftEvent(c.Skip(19).First()), GuardOnDuty = GetShiftEvent(c.Skip(19).First()) == ShiftEvent.Start ? new Guard() { ID = Convert.ToInt32(new string(c.SkipWhile(d => d != '#').Skip(1).TakeWhile(d => d != ' ').ToArray())) } : null }); //Order shifts shifts = shifts.OrderBy(c => c.TimeOfEvent); var allGuardsShifts = new HashSet <Guard>(); Guard guard = null; foreach (var shift in shifts) { switch (shift.ShiftEvent) { case ShiftEvent.Start: if (guard != null) { allGuardsShifts.Add(guard); } guard = shift.GuardOnDuty; guard.SleepPeriods = new HashSet <Period>(); break; case ShiftEvent.WakesUp: guard.SleepPeriods.First(c => !c.endTime.HasValue).endTime = shift.TimeOfEvent.AddMinutes(0); break; case ShiftEvent.FallsAsleep: guard.SleepPeriods.Add(new Period() { startTime = shift.TimeOfEvent }); break; default: throw new NotSupportedException("Event does not exist"); } } var groupedGuards = allGuardsShifts.GroupBy(c => c.ID).Select(c => new Guard() { ID = c.Key, SleepPeriods = c.SelectMany(d => d.SleepPeriods).ToHashSet() }).ToHashSet(); var aslaap = groupedGuards.Select(c => new { guardID = c.ID, minutesAsleep = AmountMinutes(c.SleepPeriods) }); var guardMostAsleep = aslaap.Where(c => c.minutesAsleep == aslaap.Max(d => d.minutesAsleep)).First(); //which minute? var guardToEvaluate = groupedGuards.Where(c => c.ID == guardMostAsleep.guardID).First(); var peakMinute = peakMinuteAsleep(guardToEvaluate.SleepPeriods); return((peakMinute * guardToEvaluate.ID).ToString()); }
private static ImmutableHashSet <ResourceDependency> OptimizeDependencies(HashSet <ResourceDependency> dependencies) => dependencies .GroupBy(dep => dep.Resource) .SelectMany(group => @group.FirstOrDefault(dep => dep.IndexExpression == null) is { } dependencyWithoutIndex ? new[] { dependencyWithoutIndex }.AsEnumerable() : @group) /* the original snippet was truncated after "dependencyWithoutIndex"; the tail here is a reconstruction under the assumption that a dependency without an index expression subsumes the indexed ones for the same resource */ .ToImmutableHashSet();
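Whatever the exact original tail was, the GroupBy/SelectMany shape above implements "collapse each resource's dependencies to the index-free one when it exists". A self-contained sketch of that collapse on plain data, independent of the snippet's ResourceDependency type:

using System.Collections.Generic;
using System.Linq;

record Dep(string Resource, string Index);

static class DepSketch
{
    public static HashSet<Dep> Optimize(IEnumerable<Dep> deps) =>
        deps.GroupBy(d => d.Resource)
            .SelectMany(g => g.FirstOrDefault(d => d.Index == null) is { } noIndex
                ? new[] { noIndex }.AsEnumerable()  // an index-free dependency subsumes the indexed ones
                : g)
            .ToHashSet();
}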
//Build the dataset and save it to a txt file public static Boolean makeTextDatasetList(HashSet <DatasetImageElements> hashSetObj, String saveFilename) { StringBuilder str = new StringBuilder(); //holds the pixel string of one image StringBuilder pictureCodeVector = new StringBuilder(); var groupByCharaters = hashSetObj.GroupBy(x => x.GetCharater()).OrderBy(x => x.Key); int countOfKeys = groupByCharaters.Count(); int countOfPixels = DatasetImageElements.ImageHeightConst * DatasetImageElements.ImageWidthConst; for (int i = 0; i < countOfPixels; i++) { str.Append("pix_"); str.Append(i + 1); str.Append("\t"); } foreach (var groupItem in groupByCharaters) { str.Append("\t"); str.Append(groupItem.Key); } try { using (StreamWriter sw = new StreamWriter(saveFilename, false, System.Text.Encoding.Default)) { sw.WriteLine(str); foreach (var hashSetElement in hashSetObj) { str.Clear(); pictureCodeVector.Clear(); pictureCodeVector.Append(hashSetElement.GetPictureStringFormat()); int numOfPixels = pictureCodeVector.Length; for (int i = 0; i < numOfPixels; i++) { str.Append(pictureCodeVector[i]); str.Append("\t"); } foreach (var groupItem in groupByCharaters) { if (hashSetElement.GetCharater().Equals(groupItem.Key)) { str.Append("1"); str.Append("\t"); } else { str.Append("0"); str.Append("\t"); } } sw.WriteLine(str); } } } catch (Exception) { return(false); } return(true); }
public void ExportProductRecommendations(IEnumerable <EntryContentBase> entries, IExportState exportState) { if (!_configuration.ProductRecommendationsImportUrl.IsValidProductRecommendationsImportUrl()) { return; } var allAssociations = new HashSet <Association>(); var entriesToDelete = new HashSet <EntryContentBase>(ContentComparer.Default); foreach (EntryContentBase entry in entries .Distinct(ContentComparer.Default) .OfType <EntryContentBase>()) { var associations = (ICollection <Association>)_associationRepository.GetAssociations(entry.ContentLink); if (associations.Count == 0) { entriesToDelete.Add(entry); } else { foreach (Association association in associations) { allAssociations.Add(association); } } } foreach (var associationsByGroup in allAssociations .GroupBy(a => a.Group.Name)) { if (exportState != null) { exportState.Action = "Exported"; exportState.ModelName = $"product associations ({associationsByGroup.Key})"; exportState.Total = associationsByGroup.Count(); exportState.Uploaded = 0; } var recommendationGroups = associationsByGroup .GroupBy(a => a.Source) .Select(g => _productFactory.BuildKaChingRecommendationGroup(g.Key, g.ToArray())) .Where(x => x != null); foreach (var group in recommendationGroups .Batch(BatchSize)) { APIFacade.Post( new { products = group }, _configuration.ProductRecommendationsImportUrl + "&recommendation_id=" + associationsByGroup.Key.SanitizeKey()); if (exportState != null) { exportState.Uploaded += group.Count; } } } if (entriesToDelete.Count == 0) { return; } foreach (var associationGroup in _associationGroupRepository.List()) { if (exportState != null) { exportState.Action = "Deleted"; exportState.ModelName = $"product associations ({associationGroup.Name})"; exportState.Total = entriesToDelete.Count; exportState.Uploaded = 0; } foreach (var batch in entriesToDelete .Select(c => c.Code.SanitizeKey()) .Batch(BatchSize)) { // Call the external endpoint asynchronously and return immediately. Task.Factory.StartNew(() => APIFacade.DeleteObjectAsync( batch, _configuration.ProductRecommendationsImportUrl + "&recommendation_id=" + associationGroup.Name.SanitizeKey()) .ConfigureAwait(false)); if (exportState != null) { exportState.Uploaded += batch.Count; } } } }
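Both this exporter and DeleteInDestination earlier lean on a Batch(n) extension that is not shown. It is not a BCL method (before .NET 6's Chunk), so here is a plausible hedged implementation; the real projects may use MoreLINQ's version instead:

using System.Collections.Generic;

static class BatchExtensions
{
    // Yields the source in lists of at most `size` items; the last batch may be smaller.
    public static IEnumerable<List<T>> Batch<T>(this IEnumerable<T> source, int size)
    {
        var bucket = new List<T>(size);
        foreach (var item in source)
        {
            bucket.Add(item);
            if (bucket.Count == size)
            {
                yield return bucket;
                bucket = new List<T>(size);  // fresh list so callers can hold onto batches
            }
        }
        if (bucket.Count > 0)
            yield return bucket;
    }
}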
private static void PopulateGapsOfOldTree(HashSet <OrganizationTreeDescriptor> oldTree, HashSet <OrganizationTreeDescriptor> newTree, ResultReport resultReport, bool excludeMatchedElements, int previousCountMissingElements = -1) { int countUnmatchedChildrenOfMissingElement = 42; bool onlyOneOptionAvailable = false; bool matchingElementFound = false; OrganizationTreeDescriptor matchingElement; var missingElementsByDepth = oldTree.Where(x => x.HasBeenMatched == false && string.IsNullOrEmpty(x.ID) && string.IsNullOrEmpty(x.Name) && x.Depth >= 0) .GroupBy(x => x.Depth) .ToDictionary(t => t.Key, t => t.ToList()); int countMissingElements = missingElementsByDepth.Count(); Dictionary <int, List <OrganizationTreeDescriptor> > potentialMatchingElementsByDepth; if (excludeMatchedElements) { potentialMatchingElementsByDepth = newTree.Where(x => x.HasBeenMatched == false).GroupBy(g => g.Depth).ToDictionary(t => t.Key, t => t.ToList()); } else { potentialMatchingElementsByDepth = newTree.GroupBy(g => g.Depth).ToDictionary(t => t.Key, t => t.ToList()); } if (countMissingElements != previousCountMissingElements && countMissingElements > 0 && potentialMatchingElementsByDepth.Count() > 0) { foreach (var missingElementsPair in missingElementsByDepth) { foreach (var missingElement in missingElementsPair.Value) { if (potentialMatchingElementsByDepth.ContainsKey(missingElementsPair.Key)) { matchingElementFound = false; matchingElement = null; foreach (var potentialElement in potentialMatchingElementsByDepth[missingElementsPair.Key]) { onlyOneOptionAvailable = potentialMatchingElementsByDepth[missingElementsPair.Key].Count() == 1; if ((potentialElement.ParentId == missingElement.ParentId || string.IsNullOrEmpty(missingElement.ParentId)) && potentialElement.Children.Count() >= missingElement.Children.Count()) { if (potentialElement.Children.Select(x => x.MatchedPartner).Count() > 0) { countUnmatchedChildrenOfMissingElement = missingElement.Children.Except(potentialElement.Children.Select(x => x.MatchedPartner).Union(potentialElement.Children)).Count(); } else { countUnmatchedChildrenOfMissingElement = missingElement.Children.Select(y => y.ID).Except(potentialElement.Children.Select(x => x.ID)).Count(); } if (countUnmatchedChildrenOfMissingElement == 0) { matchingElementFound = true; matchingElement = potentialElement; } } // if there is only one potential match, don't be picky if (onlyOneOptionAvailable && !matchingElementFound) { matchingElementFound = true; matchingElement = potentialElement; matchingElement.WasOnlyOption = true; resultReport.IdentifedDataBehaviors.Add(EnumIdentifiedDataBehavior.MATCHING_SINGLE_ELEMENT_GIVEN_DEPTH_MISMATCH); resultReport.UpdateSeverity(EnumResultSeverityType.WARNING); } } if (matchingElementFound) { if (!excludeMatchedElements) { matchingElement.UsedMoreThanOnce = true; resultReport.IdentifedDataBehaviors.Add(EnumIdentifiedDataBehavior.REUSED_ELEMENT_TO_FILL_GAP); } matchingElement.HasBeenMatched = true; matchingElement.MatchedPartner = missingElement; missingElement.HasBeenMatched = true; missingElement.MatchedPartner = matchingElement; matchingElement.IsImportedFromNewService = true; missingElement.IsMissing = false; matchingElement.IsMissing = false; matchingElement.Children.Clear(); matchingElement.Parent = missingElement.Parent; // replace in old tree foreach (var childFromMissing in missingElement.Children) { childFromMissing.Parent = matchingElement; matchingElement.Children.Add(childFromMissing); } if (missingElement.Parent != null && missingElement.Parent.Children != 
null) { missingElement.Parent.Children.Add(matchingElement); missingElement.Parent.Children.Remove(missingElement); } oldTree.Add(matchingElement); oldTree.Remove(missingElement); } } } } PopulateGapsOfOldTree(oldTree, newTree, resultReport, excludeMatchedElements, countMissingElements); } }
private static async Task <Document> ApplyFixesAsync(Document document, ImmutableArray <AddImportFixData> fixes, SyntaxFormattingOptions formattingOptions, CancellationToken cancellationToken) { if (fixes.IsEmpty) { return(document); } var solution = document.Project.Solution; var progressTracker = new ProgressTracker(); var textDiffingService = solution.Workspace.Services.GetRequiredService <IDocumentTextDifferencingService>(); var packageInstallerService = solution.Workspace.Services.GetService <IPackageInstallerService>(); var addImportService = document.GetRequiredLanguageService <IAddImportFeatureService>(); // Do not limit the results since we plan to fix all the reported issues. var codeActions = addImportService.GetCodeActionsForFixes(document, fixes, packageInstallerService, maxResults: int.MaxValue); var getChangesTasks = codeActions.Select( action => GetChangesForCodeActionAsync(document, action, progressTracker, textDiffingService, cancellationToken)); // Using Sets allows us to accumulate only the distinct changes. var allTextChanges = new HashSet <TextChange>(); // Some fixes require adding missing references. var allAddedProjectReferences = new HashSet <ProjectReference>(); var allAddedMetaDataReferences = new HashSet <MetadataReference>(); foreach (var getChangesTask in getChangesTasks) { var(projectChanges, textChanges) = await getChangesTask.ConfigureAwait(false); allTextChanges.UnionWith(textChanges); allAddedProjectReferences.UnionWith(projectChanges.GetAddedProjectReferences()); allAddedMetaDataReferences.UnionWith(projectChanges.GetAddedMetadataReferences()); } // Apply changes to both the project and document. var newProject = document.Project; newProject = newProject.AddMetadataReferences(allAddedMetaDataReferences); newProject = newProject.AddProjectReferences(allAddedProjectReferences); // Only consider insertion changes to reduce the chance of producing a // badly merged final document. Alphabetize the new imports, this will not // change the insertion point but will give a more correct result. The user // may still need to use organize imports afterwards. var orderedTextInserts = allTextChanges.Where(change => change.Span.IsEmpty) .OrderBy(change => change.NewText); // Capture each location where we are inserting imports as well as the total // length of the text we are inserting so that we can format the span afterwards. var insertSpans = allTextChanges .GroupBy(change => change.Span) .Select(changes => new TextSpan(changes.Key.Start, changes.Sum(change => change.NewText !.Length))); var text = await document.GetTextAsync(cancellationToken).ConfigureAwait(false); var newText = text.WithChanges(orderedTextInserts); var newDocument = newProject.GetRequiredDocument(document.Id).WithText(newText); // When imports are added to a code file that has no previous imports, extra // newlines are generated between each import because the fix is expecting to // separate the imports from the rest of the code file. We need to format the // imports to remove these extra newlines. return(await CleanUpNewLinesAsync(newDocument, insertSpans, formattingOptions, cancellationToken).ConfigureAwait(false)); }
public void ValidateMigrations() { var modules = migrations.GroupBy(x => x.ModuleName); foreach (var module in modules) { if (module.Any(x => x.IsRepeatable)) { if (module.GroupBy(x => GroupMigrationTags(x.Tags)).Any(x => x.Count() > 1)) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': there can be only one migration if the module has a repeatable migration"); } var versioned = module.FirstOrDefault(x => x.Version != null); if (versioned != null) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': repeatable migrations {versioned} cannot specify version (they are re-applied only when their contents changes)"); } var baseline = module.FirstOrDefault(x => x.IsBaseline); if (baseline != null) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': {baseline} cannot be both baseline and repeatable"); } } else { if (module.GroupBy(x => GroupMigrationTags(x.Tags)).Any(group => group.Count(x => x.IsBaseline) > 1)) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': there cannot be more than one baseline migration"); } var nonVersioned = module.FirstOrDefault(x => x.Version == null); if (nonVersioned != null) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': {nonVersioned} must have a version"); } var multipleVersions = module.GroupBy(x => x.Version).Where(x => x.Count() > 1); foreach (var versionMigrations in multipleVersions) { if (versionMigrations.Any(x => versionMigrations.Any(y => x != y && x.IsBaseline == y.IsBaseline && x.Tags.Length == y.Tags.Length && x.Tags.All(xTagGroup => y.Tags.Any(yTagGroup => xTagGroup.Length == yTagGroup.Length && xTagGroup.All(yTagGroup.Contains)))))) { throw new DatabaseMigrationException( $"Failed to validate database migrations for module '{module.Key}': there are duplicate definitions for version {versionMigrations.Key}"); } } } // TODO check for cyclic dependencies } }
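The validation above repeatedly uses GroupBy(...).Where(g => g.Count() > 1) to detect duplicates. That check extracts into a small reusable helper; a sketch:

using System;
using System.Collections.Generic;
using System.Linq;

static class DuplicateSketch
{
    // Returns each key that occurs more than once in the source.
    public static IEnumerable<TKey> DuplicateKeys<T, TKey>(
        this IEnumerable<T> source, Func<T, TKey> key) =>
        source.GroupBy(key).Where(g => g.Count() > 1).Select(g => g.Key);
}

With such a helper, the duplicate-version case above reduces to something like module.DuplicateKeys(x => x.Version).Any() before the finer tag comparison.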
/// <summary> /// Generates a syntax tree for the provided assemblies. /// </summary> /// <param name="assemblies">The assemblies to generate code for.</param> /// <param name="runtime">Whether or not runtime code generation is being performed.</param> /// <returns>The generated syntax tree.</returns> private GeneratedSyntax GenerateForAssemblies(List <Assembly> assemblies, bool runtime) { if (Logger.IsVerbose) { Logger.Verbose( "Generating code for assemblies: {0}", string.Join(", ", assemblies.Select(_ => _.FullName))); } Assembly targetAssembly; HashSet <Type> ignoredTypes; if (runtime) { // Ignore types which have already been accounted for. ignoredTypes = GetTypesWithGeneratedSupportClasses(); targetAssembly = null; } else { ignoredTypes = new HashSet <Type>(); targetAssembly = assemblies.FirstOrDefault(); } var members = new List <MemberDeclarationSyntax>(); // Include assemblies which are marked as included. var knownAssemblyAttributes = new Dictionary <Assembly, KnownAssemblyAttribute>(); var knownAssemblies = new HashSet <Assembly>(); foreach (var attribute in assemblies.SelectMany(asm => asm.GetCustomAttributes <KnownAssemblyAttribute>())) { knownAssemblyAttributes[attribute.Assembly] = attribute; knownAssemblies.Add(attribute.Assembly); } if (knownAssemblies.Count > 0) { knownAssemblies.UnionWith(assemblies); assemblies = knownAssemblies.ToList(); } // Get types from assemblies which reference Orleans and are not generated assemblies. var includedTypes = new HashSet <Type>(); for (var i = 0; i < assemblies.Count; i++) { var assembly = assemblies[i]; foreach (var attribute in assembly.GetCustomAttributes <ConsiderForCodeGenerationAttribute>()) { ConsiderType(attribute.Type, runtime, targetAssembly, includedTypes, considerForSerialization: true); if (attribute.ThrowOnFailure && !serializerGenerationManager.IsTypeRecorded(attribute.Type)) { throw new CodeGenerationException( $"Found {attribute.GetType().Name} for type {attribute.Type.GetParseableName()}, but code" + " could not be generated. Ensure that the type is accessible."); } } KnownAssemblyAttribute knownAssemblyAttribute; var considerAllTypesForSerialization = knownAssemblyAttributes.TryGetValue(assembly, out knownAssemblyAttribute) && knownAssemblyAttribute.TreatTypesAsSerializable; foreach (var type in TypeUtils.GetDefinedTypes(assembly, Logger)) { var considerForSerialization = considerAllTypesForSerialization || type.IsSerializable; ConsiderType(type.AsType(), runtime, targetAssembly, includedTypes, considerForSerialization); } } includedTypes.RemoveWhere(_ => ignoredTypes.Contains(_)); // Group the types by namespace and generate the required code in each namespace. foreach (var group in includedTypes.GroupBy(_ => CodeGeneratorCommon.GetGeneratedNamespace(_))) { var namespaceMembers = new List <MemberDeclarationSyntax>(); foreach (var type in group) { // The module containing the serializer. var module = runtime ? null : type.GetTypeInfo().Module; // Every type which is encountered must be considered for serialization. Action <Type> onEncounteredType = encounteredType => { // If a type was encountered which can be accessed, process it for serialization. 
serializerGenerationManager.RecordTypeToGenerate(encounteredType, module, targetAssembly); }; if (Logger.IsVerbose2) { Logger.Verbose2("Generating code for: {0}", type.GetParseableName()); } if (GrainInterfaceUtils.IsGrainInterface(type)) { if (Logger.IsVerbose2) { Logger.Verbose2( "Generating GrainReference and MethodInvoker for {0}", type.GetParseableName()); } GrainInterfaceUtils.ValidateInterfaceRules(type); namespaceMembers.Add(GrainReferenceGenerator.GenerateClass(type, onEncounteredType)); namespaceMembers.Add(GrainMethodInvokerGenerator.GenerateClass(type)); } // Generate serializers. var first = true; Type toGen; while (serializerGenerationManager.GetNextTypeToProcess(out toGen)) { if (!runtime) { if (first) { ConsoleText.WriteStatus("ClientGenerator - Generating serializer classes for types:"); first = false; } ConsoleText.WriteStatus( "\ttype " + toGen.FullName + " in namespace " + toGen.Namespace + " defined in Assembly " + toGen.GetTypeInfo().Assembly.GetName()); } if (Logger.IsVerbose2) { Logger.Verbose2( "Generating & Registering Serializer for Type {0}", toGen.GetParseableName()); } namespaceMembers.Add(SerializerGenerator.GenerateClass(toGen, onEncounteredType)); } } if (namespaceMembers.Count == 0) { if (Logger.IsVerbose) { Logger.Verbose2("Skipping namespace: {0}", group.Key); } continue; } members.Add( SF.NamespaceDeclaration(SF.ParseName(group.Key)) .AddUsings( TypeUtils.GetNamespaces(typeof(TaskUtility), typeof(GrainExtensions), typeof(IntrospectionExtensions)) .Select(_ => SF.UsingDirective(SF.ParseName(_))) .ToArray()) .AddMembers(namespaceMembers.ToArray())); } return(new GeneratedSyntax { SourceAssemblies = assemblies, Syntax = members.Count > 0 ? SF.CompilationUnit().AddMembers(members.ToArray()) : null }); }
static void Main(string[] args) { string nullOrEmptyToExitApp = "_"; while (!string.IsNullOrEmpty(nullOrEmptyToExitApp)) { try { var voltas = new HashSet <Volta>(); Console.WriteLine("Paste the log file path here:"); var caminho = Console.ReadLine(); if (!File.Exists(caminho)) { throw new FileNotFoundException(caminho); } foreach (var linha in File.ReadAllLines(caminho).Skip(1)) { voltas.Add(Util.ConvertToVolta(Util.SplitLog(linha))); } var results = voltas .GroupBy(c => c.piloto) .Select(r => new KeyValuePair <int, ResultadoCorrida>( r.Key.numero, new ResultadoCorrida() { posicaoChegada = null, codigoPiloto = r.Key.numero, nomePiloto = r.Key.nome, quantidadeVoltasCompletadas = r.Count(), tempoTotalProva = new TimeSpan(r.Sum(v => v.tempoVolta.Ticks)), completouCorrida = r.Max(v => v.numeroVolta) == 4, melhorVolta = new TimeSpan(r.Min(v => v.tempoVolta.Ticks)), velocidadeMediaCorrida = new TimeSpan(r.Sum(v => v.velocidadeMediaVolta.Ticks) / r.Count()), tempoChegadaAposVencedor = null } ) ).ToDictionary(i => i.Key, i => i.Value); Console.WriteLine("P - COD - Driver Name - L - Total Time - Time After - Avg Speed - Best Lap"); int posicao = 1; long tempoChegadaAnterior = 0; int paddingNumeroPiloto = 3; int paddingNomePiloto = results.Values.Max(v => v.nomePiloto.Length); foreach (var result in results.Values.OrderBy(v => v.tempoTotalProva)) { if (result.completouCorrida) { result.tempoChegadaAposVencedor = new TimeSpan(tempoChegadaAnterior != 0 ? result.tempoTotalProva.Ticks - tempoChegadaAnterior : 0); tempoChegadaAnterior = result.tempoTotalProva.Ticks; result.posicaoChegada = posicao; Console.Write(result.posicaoChegada); Console.Write(" - "); Console.Write(result.codigoPiloto.ToString().PadLeft(paddingNumeroPiloto)); Console.Write(" - "); Console.Write(result.nomePiloto.PadRight(paddingNomePiloto)); Console.Write(" - "); Console.Write(result.quantidadeVoltasCompletadas); Console.Write(" - "); Console.Write(result.tempoTotalProva); Console.Write(" - "); Console.Write(result.posicaoChegada != 1 ? result.tempoChegadaAposVencedor.Value.ToString() : string.Empty.PadRight(16)); Console.Write(" - "); Console.Write(result.velocidadeMediaCorrida); Console.Write(" - "); Console.Write(result.melhorVolta); Console.WriteLine(); posicao++; } } Console.WriteLine($"Best lap of the race: {results.Values.Min(v => v.melhorVolta)}"); } catch (Exception e) { Console.WriteLine(e.Message); Console.WriteLine(); Console.WriteLine(e); } nullOrEmptyToExitApp = Console.ReadLine(); } }
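The race-result grouping above aggregates TimeSpans by summing and dividing Ticks, because LINQ ships no Sum or Average overloads for TimeSpan. That trick stands alone; a sketch:

using System;
using System.Collections.Generic;
using System.Linq;

static class TimeSpanSketch
{
    // Aggregate via Ticks, then wrap the result back into a TimeSpan.
    public static TimeSpan Total(IEnumerable<TimeSpan> laps) =>
        new TimeSpan(laps.Sum(t => t.Ticks));

    public static TimeSpan Average(IReadOnlyCollection<TimeSpan> laps) =>
        laps.Count == 0 ? TimeSpan.Zero : new TimeSpan(laps.Sum(t => t.Ticks) / laps.Count);
}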
/// <summary>
/// Generates a syntax tree for the provided assemblies.
/// </summary>
/// <param name="assemblies">The assemblies to generate code for.</param>
/// <param name="runtime">Whether or not runtime code generation is being performed.</param>
/// <returns>The generated syntax tree.</returns>
private static GeneratedSyntax GenerateForAssemblies(List<Assembly> assemblies, bool runtime)
{
    if (Logger.IsVerbose)
    {
        Logger.Verbose(
            "Generating code for assemblies: {0}",
            string.Join(", ", assemblies.Select(_ => _.FullName)));
    }

    Assembly targetAssembly;
    HashSet<Type> ignoredTypes;
    if (runtime)
    {
        // Ignore types which have already been accounted for.
        ignoredTypes = CodeGeneratorCommon.GetTypesWithImplementations(
            typeof(MethodInvokerAttribute),
            typeof(GrainReferenceAttribute),
            typeof(GrainStateAttribute),
            typeof(SerializerAttribute));
        targetAssembly = null;
    }
    else
    {
        ignoredTypes = new HashSet<Type>();
        targetAssembly = assemblies.FirstOrDefault();
    }

    var members = new List<MemberDeclarationSyntax>();

    // Get types from assemblies which reference Orleans and are not generated assemblies.
    var includedTypes = new HashSet<Type>();
    foreach (var type in assemblies.SelectMany(_ => _.DefinedTypes))
    {
        // The module containing the serializer.
        var module = runtime ? null : type.Module;
        var typeInfo = type.GetTypeInfo();

        // Every type which is encountered must be considered for serialization.
        if (!typeInfo.IsNested && !typeInfo.IsGenericParameter && typeInfo.IsSerializable)
        {
            // If a type was encountered which can be accessed, process it for serialization.
            var isAccessibleForSerialization =
                !TypeUtilities.IsTypeIsInaccessibleForSerialization(type, module, targetAssembly);
            if (isAccessibleForSerialization)
            {
                includedTypes.Add(type);
                SerializerGenerationManager.RecordTypeToGenerate(type);
            }
        }

        // Collect the types which require code generation.
        if (GrainInterfaceData.IsGrainInterface(type))
        {
            if (Logger.IsVerbose2)
            {
                Logger.Verbose2("Will generate code for: {0}", type.GetParseableName());
            }

            includedTypes.Add(type);
        }
    }

    includedTypes.RemoveWhere(_ => ignoredTypes.Contains(_));

    // Group the types by namespace and generate the required code in each namespace.
    foreach (var group in includedTypes.GroupBy(_ => CodeGeneratorCommon.GetGeneratedNamespace(_)))
    {
        var namespaceMembers = new List<MemberDeclarationSyntax>();
        foreach (var type in group)
        {
            // The module containing the serializer.
            var module = runtime ? null : type.Module;

            // Every type which is encountered must be considered for serialization.
            Action<Type> onEncounteredType = encounteredType =>
            {
                // If a type was encountered which can be accessed, process it for serialization.
                var isAccessibleForSerialization =
                    !TypeUtilities.IsTypeIsInaccessibleForSerialization(encounteredType, module, targetAssembly);
                if (isAccessibleForSerialization)
                {
                    SerializerGenerationManager.RecordTypeToGenerate(encounteredType);
                }
            };

            if (Logger.IsVerbose2)
            {
                Logger.Verbose2("Generating code for: {0}", type.GetParseableName());
            }

            if (GrainInterfaceData.IsGrainInterface(type))
            {
                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2(
                        "Generating GrainReference and MethodInvoker for {0}",
                        type.GetParseableName());
                }

                namespaceMembers.Add(GrainReferenceGenerator.GenerateClass(type, onEncounteredType));
                namespaceMembers.Add(GrainMethodInvokerGenerator.GenerateClass(type));
            }

            // Generate serializers.
            var first = true;
            Type toGen;
            while (SerializerGenerationManager.GetNextTypeToProcess(out toGen))
            {
                // Filter out types which are inaccessible to the serialization module/assembly.
                var skipSerializerGeneration = toGen.GetAllFields()
                    .Any(
                        field =>
                        TypeUtilities.IsTypeIsInaccessibleForSerialization(
                            field.FieldType,
                            module,
                            targetAssembly));
                if (skipSerializerGeneration)
                {
                    continue;
                }

                if (!runtime)
                {
                    if (first)
                    {
                        ConsoleText.WriteStatus("ClientGenerator - Generating serializer classes for types:");
                        first = false;
                    }

                    ConsoleText.WriteStatus(
                        "\ttype " + toGen.FullName + " in namespace " + toGen.Namespace
                        + " defined in Assembly " + toGen.Assembly.GetName());
                }

                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2("Generating & Registering Serializer for Type {0}", toGen.GetParseableName());
                }

                namespaceMembers.AddRange(SerializerGenerator.GenerateClass(toGen, onEncounteredType));
            }
        }

        if (namespaceMembers.Count == 0)
        {
            if (Logger.IsVerbose)
            {
                Logger.Verbose2("Skipping namespace: {0}", group.Key);
            }

            continue;
        }

        members.Add(
            SF.NamespaceDeclaration(SF.ParseName(group.Key))
                .AddUsings(
                    TypeUtils.GetNamespaces(typeof(TaskUtility), typeof(GrainExtensions))
                        .Select(_ => SF.UsingDirective(SF.ParseName(_)))
                        .ToArray())
                .AddMembers(namespaceMembers.ToArray()));
    }

    return new GeneratedSyntax
    {
        SourceAssemblies = assemblies,
        Syntax = members.Count > 0 ? SF.CompilationUnit().AddMembers(members.ToArray()) : null
    };
}
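GetNextTypeToProcess drains a work queue that onEncounteredType keeps feeding while serializers are being generated, so newly discovered field types are handled in the same loop. A plausible minimal sketch of such a queue, assuming the usual HashSet-plus-Queue dedup pattern (the names are illustrative, not the actual Orleans SerializerGenerationManager API):

using System;
using System.Collections.Generic;

// Illustrative work queue: each type is recorded at most once and drained
// in FIFO order, mirroring RecordTypeToGenerate / GetNextTypeToProcess.
public class TypeWorkQueue
{
    private readonly HashSet<Type> _seen = new HashSet<Type>();
    private readonly Queue<Type> _pending = new Queue<Type>();

    public void Record(Type type)
    {
        // HashSet.Add returns true only the first time, so duplicates are dropped.
        if (_seen.Add(type))
        {
            _pending.Enqueue(type);
        }
    }

    public bool TryGetNext(out Type type)
    {
        if (_pending.Count > 0)
        {
            type = _pending.Dequeue();
            return true;
        }

        type = null;
        return false;
    }
}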
public string AnalyzeSolution()
{
    var maxCourseSb = new StringBuilder();
    var unavailableSb = new StringBuilder();
    var multipleRoomsSb = new StringBuilder();
    var multipleCoursesSb = new StringBuilder();
    var lecturerSb = new StringBuilder();
    var curriculumSb = new StringBuilder();
    var violations = 0.0;

    Objective = 0.0;
    IsFeasible = false;
    UnscheduledLectures = 0.0;
    RoomCapacity = 0.0;
    MinimumWorkingDays = 0.0;
    CurriculumCompactness = 0.0;
    RoomStability = 0.0;
    StudentMinMaxLoad = 0.0;
    BadTimeslots = 0.0;
    RoomUnsuitability = 0.0;
    RoomCost = 0.0;
    RoomsUsed = 0.0;

    var courseAssignments = _assignments.GroupBy(a => a.Course).ToDictionary(g => g.Key, g => g.ToList());
    var curriculaAssignments = _data.Curricula.ToDictionary(curriculum => curriculum, curriculum => new List<Assignment>());
    var consideredAloneTimeSlot = _data.Courses.ToDictionary(course => course, course => new HashSet<TimeSlot>());

    PenaltyForCourse = new Dictionary<Course, int>();
    foreach (var course in _data.Courses)
    {
        PenaltyForCourse[course] = 0;
    }

    if (!RoomAssignmentsExists && DoStageIRoomCheck)
    {
        CheckStageIFeasibility();
    }

    foreach (var course in _data.Courses)
    {
        if (!courseAssignments.ContainsKey(course))
        {
            UnscheduledLectures += course.Lectures;
            if (course.MinimumWorkingDays > 0)
            {
                MinimumWorkingDays += course.MinimumWorkingDays;
            }

            continue;
        }

        if (courseAssignments[course].Count > course.Lectures)
        {
            maxCourseSb.AppendFormat(
                "Course {0} has been scheduled for {1} lectures but is only allowed to be scheduled for {2}.\n",
                course, courseAssignments[course].Count, course.Lectures);
            violations += courseAssignments[course].Count - course.Lectures;
        }
        else if (courseAssignments[course].Count < course.Lectures)
        {
            UnscheduledLectures += course.Lectures - courseAssignments[course].Count;
        }

        foreach (var curriculum in course.Curricula)
        {
            foreach (var assignment in courseAssignments[course])
            {
                curriculaAssignments[curriculum].Add(assignment);
            }
        }

        var timeSlotAssignments = courseAssignments[course].GroupBy(ca => ca.TimeSlot).ToDictionary(g => g.Key, g => g.ToList());
        foreach (var timeSlotAssign in timeSlotAssignments)
        {
            BadTimeslots += timeSlotAssign.Key.Cost * timeSlotAssign.Value.Count;
            PenaltyForCourse[course] += timeSlotAssign.Key.Cost * timeSlotAssign.Value.Count;

            if (course.UnavailableTimeSlots.Contains(timeSlotAssign.Key))
            {
                unavailableSb.AppendFormat(
                    "Course {0} has been scheduled at day {1}, period {2} but this time slot is unavailable for this course.\n",
                    course, timeSlotAssign.Key.Day, timeSlotAssign.Key.Period);
                violations += 1.0;
            }

            if (timeSlotAssign.Value.Count <= 1)
            {
                continue;
            }

            multipleRoomsSb.AppendFormat(
                "Course {0} has been scheduled day {1}, period {2} in rooms {3}\n",
                course, timeSlotAssign.Key.Day, timeSlotAssign.Key.Period,
                string.Join(", ", timeSlotAssign.Value.Select(a => a.Room)));
            violations += timeSlotAssign.Value.Count - 1.0;
        }

        var workDays = timeSlotAssignments.Select(t => t.Key.Day).Distinct().Count();
        if (workDays < course.MinimumWorkingDays)
        {
            MinimumWorkingDays += course.MinimumWorkingDays - workDays;
        }

        if (RoomAssignmentsExists)
        {
            RoomStability += courseAssignments[course].Select(a => a.Room).Distinct().Count() - 1;
        }
    }

    foreach (var timeSlotAssigns in _data.Curricula.Select(curriculum =>
        curriculaAssignments[curriculum].Select(a => a.TimeSlot).Distinct().ToList()))
    {
        CurriculumCompactness += timeSlotAssigns.Count(
            timeSlotAssign => !timeSlotAssigns.Any(ts => TimeSlot.TimeSlotsAreConsequtive(ts, timeSlotAssign)));
    }

    if (RoomAssignmentsExists)
    {
        var roomAssignments = _assignments.GroupBy(a => a.Room).ToDictionary(g => g.Key, g => g.ToList());
        foreach (var roomAssign in roomAssignments)
        {
            var timeSlotAssigns = roomAssign.Value.GroupBy(a => a.TimeSlot)
                .ToDictionary(g => g.Key, g => g.Distinct().ToList());
            foreach (var timeSlotAssign in timeSlotAssigns)
            {
                var courseAssigns = timeSlotAssign.Value.Select(a => a.Course).Distinct().ToList();
                if (courseAssigns.Count > 1)
                {
                    multipleCoursesSb.AppendFormat(
                        "Room {0} has been scheduled at day {1}, period {2} for courses {3}\n",
                        roomAssign.Key, timeSlotAssign.Key.Day, timeSlotAssign.Key.Period,
                        string.Join(", ", courseAssigns));
                    violations += timeSlotAssign.Value.Count - 1.0;
                }

                var students = timeSlotAssign.Value.Sum(a => a.Course.NumberOfStudents);
                if (students > roomAssign.Key.Capacity)
                {
                    RoomCapacity += students - roomAssign.Key.Capacity;
                }
            }

            if (roomAssign.Value.Count > 0)
            {
                RoomsUsed += 1;
                RoomCost += roomAssign.Key.Cost;
            }

            RoomUnsuitability += roomAssign.Value.Count(a => a.Course.UnsuitableRooms.Contains(roomAssign.Key));
        }
    }

    var lecturerAssigns = _assignments.GroupBy(a => a.Course.Lecturer).ToDictionary(g => g.Key, g => g.ToList());
    foreach (var lecturerAssign in lecturerAssigns)
    {
        var timeSlotAssigns = lecturerAssign.Value.GroupBy(l => l.TimeSlot)
            .ToDictionary(g => g.Key, g => g.Distinct().ToList());
        foreach (var timeSlotAssign in timeSlotAssigns)
        {
            var courseAssigns = timeSlotAssign.Value.Select(a => a.Course).Distinct().ToList();
            if (courseAssigns.Count <= 1)
            {
                continue;
            }

            lecturerSb.AppendFormat(
                "Lecturer {0} has been scheduled at day {1}, period {2} for courses {3}\n",
                lecturerAssign.Key, timeSlotAssign.Key.Day, timeSlotAssign.Key.Period,
                string.Join(", ", courseAssigns));
            violations += timeSlotAssign.Value.Count - 1.0;
        }
    }

    foreach (var curriculumAssign in curriculaAssignments)
    {
        var timeSlotAssigns = curriculumAssign.Value.GroupBy(cu => cu.TimeSlot)
            .ToDictionary(g => g.Key, g => g.Distinct().ToList());
        foreach (var timeSlotAssign in timeSlotAssigns)
        {
            var courseAssigns = timeSlotAssign.Value.Select(a => a.Course).Distinct().ToList();
            if (courseAssigns.Count <= 1)
            {
                continue;
            }

            curriculumSb.AppendFormat(
                "Curriculum {0} has been scheduled at day {1}, period {2} for courses {3}\n",
                curriculumAssign.Key, timeSlotAssign.Key.Day, timeSlotAssign.Key.Period,
                string.Join(", ", courseAssigns));
            violations += timeSlotAssign.Value.Count - 1.0;
        }

        var dayAssign = curriculumAssign.Value.GroupBy(a => a.TimeSlot.Day).ToDictionary(g => g.Key, g => g.Count());
        foreach (var day in dayAssign)
        {
            var minMaxLoadViol = Math.Max(day.Value - _data.MaximumPeriodsPerDay, 0)
                + Math.Max(_data.MinimumPeriodsPerDay - day.Value, 0);
            StudentMinMaxLoad += minMaxLoadViol;
            // Console.WriteLine($"S:StudentloadViol: {curriculumAssign.Key} {day.Key}: {minMaxLoadViol}");
        }
    }

    var sB = new StringBuilder();
    if (violations > 0.0)
    {
        if (maxCourseSb.Length > 0)
        {
            sB.AppendLine("Each course is only allowed to be scheduled for a given maximum number of lectures\n");
            sB.AppendLine(maxCourseSb.ToString());
        }

        if (unavailableSb.Length > 0)
        {
            if (maxCourseSb.Length > 0)
            {
                sB.AppendLine();
            }

            sB.AppendLine("Courses are not allowed to be scheduled in time slots marked as unavailable\n");
            sB.AppendLine(unavailableSb.ToString());
        }

        if (multipleRoomsSb.Length > 0)
        {
            if (maxCourseSb.Length > 0 || unavailableSb.Length > 0)
            {
                sB.AppendLine();
            }

            sB.AppendLine("Each course is only allowed to be scheduled in a single room in each time slot\n");
            sB.AppendLine(multipleRoomsSb.ToString());
        }

        if (multipleCoursesSb.Length > 0)
        {
            if (maxCourseSb.Length > 0 || unavailableSb.Length > 0 || multipleRoomsSb.Length > 0)
            {
                sB.AppendLine();
            }

            sB.AppendLine("Each room can only accommodate a single course in each time slot\n");
            sB.AppendLine(multipleCoursesSb.ToString());
        }

        if (lecturerSb.Length > 0)
        {
            if (maxCourseSb.Length > 0 || unavailableSb.Length > 0 || multipleRoomsSb.Length > 0
                || multipleCoursesSb.Length > 0)
            {
                sB.AppendLine();
            }

            sB.AppendLine("Each lecturer can only teach a single course in each time slot\n");
            sB.AppendLine(lecturerSb.ToString());
        }

        if (curriculumSb.Length > 0)
        {
            if (maxCourseSb.Length > 0 || unavailableSb.Length > 0 || multipleRoomsSb.Length > 0
                || multipleCoursesSb.Length > 0 || lecturerSb.Length > 0)
            {
                sB.AppendLine();
            }

            sB.AppendLine("Only a single course in a curriculum can be scheduled in each time slot\n");
            sB.AppendLine(curriculumSb.ToString());
        }

        ResultLine = "RESULT WRONG";
        TextLine = "The solution is infeasible. The number of violations is " + violations;
        ScoreLine = "SCORE " + violations;
        return sB.ToString();
    }

    IsFeasible = true;
    Objective = RoomCapacity * Formulation.RoomCapacityWeight
        + MinimumWorkingDays * Formulation.MinimumWorkingDaysWeight
        + CurriculumCompactness * Formulation.CurriculumCompactnessWeight
        + RoomStability * Formulation.RoomStabilityWeight
        + BadTimeslots * Formulation.BadTimeslotsWeight
        + RoomUnsuitability * Formulation.UnsuitableRoomsWeight
        + StudentMinMaxLoad * Formulation.StudentMinMaxLoadWeight;

    var calculatedWidth = "Violation".Length;
    sB.AppendLine("Table 1: Penalty values");
    sB.AppendLine();
    sB.AppendFormat("Name | {0} | {1} | \n", "Violation".PadRight(calculatedWidth), "Obj".PadLeft(calculatedWidth));
    sB.AppendFormat("============================={0}\n", new string('=', calculatedWidth * 2));
    sB.AppendFormat("UNSCHEDULED | {0} | {1} |\n", UnscheduledLectures.ToString().PadLeft(calculatedWidth), "".PadRight(calculatedWidth));
    sB.AppendFormat("ROOMCAPACITY | {0} | {1} |\n", RoomCapacity.ToString().PadLeft(calculatedWidth), (RoomCapacity * Formulation.RoomCapacityWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("MINIMUMWORKINGDAYS | {0} | {1} |\n", MinimumWorkingDays.ToString().PadLeft(calculatedWidth), (MinimumWorkingDays * Formulation.MinimumWorkingDaysWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("CURRICULUMCOMPACTNESS | {0} | {1} |\n", CurriculumCompactness.ToString().PadLeft(calculatedWidth), (CurriculumCompactness * Formulation.CurriculumCompactnessWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("ROOMSTABILITY | {0} | {1} |\n", RoomStability.ToString().PadLeft(calculatedWidth), (RoomStability * Formulation.RoomStabilityWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("RoomUnsuitability | {0} | {1} |\n", RoomUnsuitability.ToString().PadLeft(calculatedWidth), (RoomUnsuitability * Formulation.UnsuitableRoomsWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("StudentMinMaxLoad | {0} | {1} |\n", StudentMinMaxLoad.ToString().PadLeft(calculatedWidth), (StudentMinMaxLoad * Formulation.StudentMinMaxLoadWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("BadTimeslots | {0} | {1} |\n", BadTimeslots.ToString().PadLeft(calculatedWidth), (BadTimeslots * Formulation.BadTimeslotsWeight).ToString().PadLeft(calculatedWidth));
    sB.AppendFormat("OBJECTIVE | {0} | {1} |\n", " ".PadLeft(calculatedWidth), Objective.ToString().PadLeft(calculatedWidth));
    sB.AppendLine();
    sB.AppendLine();
    sB.AppendLine("Table 2: Used Room stat");
    sB.AppendLine();
    sB.AppendLine("Name | Value");
    sB.AppendLine("================================================================");
    sB.AppendFormat("Used Rooms | {0}/{1} ({2:0.00%})\n", RoomsUsed, _data.Rooms.Count, RoomsUsed / _data.Rooms.Count);
    sB.AppendFormat("Room Cost | {0}/{1} ({2:0.00%})\n", RoomCost, _data.Rooms.Sum(r => r.Cost), RoomCost / _data.Rooms.Sum(r => r.Cost));
    // sB.AppendFormat("Utilization | Total violation of minimum working days");

    ResultLine = "RESULT CORRECT";
    TextLine = "The solution is feasible. The Objective value is " + Objective;
    ScoreLine = "SCORE " + Objective;
    return sB.ToString();
}
/// <summary>
/// Generates a syntax tree for the provided assemblies.
/// </summary>
/// <param name="assemblies">The assemblies to generate code for.</param>
/// <param name="runtime">Whether or not runtime code generation is being performed.</param>
/// <returns>The generated syntax tree.</returns>
private static GeneratedSyntax GenerateForAssemblies(List<Assembly> assemblies, bool runtime)
{
    if (Logger.IsVerbose)
    {
        Logger.Verbose(
            "Generating code for assemblies: {0}",
            string.Join(", ", assemblies.Select(_ => _.FullName)));
    }

    Assembly targetAssembly;
    HashSet<Type> ignoredTypes;
    if (runtime)
    {
        // Ignore types which have already been accounted for.
        ignoredTypes = GetTypesWithGeneratedSupportClasses();
        targetAssembly = null;
    }
    else
    {
        ignoredTypes = new HashSet<Type>();
        targetAssembly = assemblies.FirstOrDefault();
    }

    var members = new List<MemberDeclarationSyntax>();

    // If any KnownAssemblies have been specified, include them during code generation.
    var knownAssemblies = assemblies.SelectMany(_ => _.GetCustomAttributes<KnownAssemblyAttribute>())
        .Select(_ => _.Assembly)
        .Distinct()
        .ToSet();
    if (knownAssemblies.Count > 0)
    {
        knownAssemblies.UnionWith(assemblies);
        assemblies = knownAssemblies.ToList();
    }

    // Get types from assemblies which reference Orleans and are not generated assemblies.
    var includedTypes = new HashSet<Type>();
    for (var i = 0; i < assemblies.Count; i++)
    {
        var assembly = assemblies[i];
        foreach (var attribute in assembly.GetCustomAttributes<KnownTypeAttribute>())
        {
            ConsiderType(attribute.Type, runtime, targetAssembly, includedTypes);
        }

        foreach (var type in assembly.DefinedTypes)
        {
            ConsiderType(type, runtime, targetAssembly, includedTypes);
        }
    }

    includedTypes.RemoveWhere(_ => ignoredTypes.Contains(_));

    // Group the types by namespace and generate the required code in each namespace.
    foreach (var group in includedTypes.GroupBy(_ => CodeGeneratorCommon.GetGeneratedNamespace(_)))
    {
        var namespaceMembers = new List<MemberDeclarationSyntax>();
        foreach (var type in group)
        {
            // The module containing the serializer.
            var module = runtime ? null : type.Module;

            // Every type which is encountered must be considered for serialization.
            Action<Type> onEncounteredType = encounteredType =>
            {
                // If a type was encountered which can be accessed, process it for serialization.
                SerializerGenerationManager.RecordTypeToGenerate(encounteredType, module, targetAssembly);
            };

            if (Logger.IsVerbose2)
            {
                Logger.Verbose2("Generating code for: {0}", type.GetParseableName());
            }

            if (GrainInterfaceData.IsGrainInterface(type))
            {
                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2(
                        "Generating GrainReference and MethodInvoker for {0}",
                        type.GetParseableName());
                }

                GrainInterfaceData.ValidateInterfaceRules(type);

                namespaceMembers.Add(GrainReferenceGenerator.GenerateClass(type, onEncounteredType));
                namespaceMembers.Add(GrainMethodInvokerGenerator.GenerateClass(type));
            }

            // Generate serializers.
            var first = true;
            Type toGen;
            while (SerializerGenerationManager.GetNextTypeToProcess(out toGen))
            {
                if (!runtime)
                {
                    if (first)
                    {
                        ConsoleText.WriteStatus("ClientGenerator - Generating serializer classes for types:");
                        first = false;
                    }

                    ConsoleText.WriteStatus(
                        "\ttype " + toGen.FullName + " in namespace " + toGen.Namespace
                        + " defined in Assembly " + toGen.Assembly.GetName());
                }

                if (Logger.IsVerbose2)
                {
                    Logger.Verbose2("Generating & Registering Serializer for Type {0}", toGen.GetParseableName());
                }

                namespaceMembers.AddRange(SerializerGenerator.GenerateClass(toGen, onEncounteredType));
            }
        }

        if (namespaceMembers.Count == 0)
        {
            if (Logger.IsVerbose)
            {
                Logger.Verbose2("Skipping namespace: {0}", group.Key);
            }

            continue;
        }

        members.Add(
            SF.NamespaceDeclaration(SF.ParseName(group.Key))
                .AddUsings(
                    TypeUtils.GetNamespaces(typeof(TaskUtility), typeof(GrainExtensions))
                        .Select(_ => SF.UsingDirective(SF.ParseName(_)))
                        .ToArray())
                .AddMembers(namespaceMembers.ToArray()));
    }

    return new GeneratedSyntax
    {
        SourceAssemblies = assemblies,
        Syntax = members.Count > 0 ? SF.CompilationUnit().AddMembers(members.ToArray()) : null
    };
}
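The KnownAssemblyAttribute handling above widens the input list only when extra assemblies are actually discovered. The same logic in isolation, assuming the attribute's Assembly property as used in the snippet and substituting LINQ's ToHashSet for the Orleans ToSet helper:

using System.Collections.Generic;
using System.Linq;
using System.Reflection;

public static class AssemblyExpansion
{
    // Collect assemblies referenced via [KnownAssembly]; if any were found,
    // merge them with the originals, otherwise return the input unchanged.
    public static List<Assembly> Expand(List<Assembly> assemblies)
    {
        var known = assemblies
            .SelectMany(a => a.GetCustomAttributes<KnownAssemblyAttribute>())
            .Select(attr => attr.Assembly)
            .ToHashSet();

        if (known.Count == 0)
        {
            return assemblies;
        }

        known.UnionWith(assemblies);
        return known.ToList();
    }
}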
public override HashSet<UserData> GetUserData()
{
    using (Nop190DataContext context = GetContext())
    {
        HashSet<Nop190_Customer> customers = context.Nop190_Customers
            .Where(x => !x.Deleted && x.Email != null && x.PasswordHash != null && !x.IsGuest)
            .ToHashSet();
        HashSet<Nop190_CustomerAttribute> attributes = context.Nop190_CustomerAttributes.ToHashSet();
        Dictionary<int, HashSet<Nop190_CustomerAttribute>> attributeUserDictionary = attributes
            .GroupBy(x => x.CustomerId)
            .ToDictionary(grouping => grouping.Key, grouping => grouping.ToHashSet());
        HashSet<Nop190_Address> addresses = context.Nop190_Addresses.ToHashSet();
        var stateProvinces = GetStateProvinces(context);
        var countries = GetCountries(context);

        var userDatas = new HashSet<UserData>();
        foreach (Nop190_Customer customer in customers)
        {
            Nop190_Customer thisCustomer = customer;
            Dictionary<string, string> customerAttributes =
                attributeUserDictionary.ContainsKey(customer.CustomerID)
                    ? attributeUserDictionary[customer.CustomerID]
                        .GroupBy(attribute => attribute.Key)
                        .ToDictionary(grouping => grouping.Key, attribute => attribute.First().Value)
                    : new Dictionary<string, string>();

            // HashSet<T> has no FindAll; Where + ToHashSet performs the same filtering.
            HashSet<Nop190_Address> customerAddresses = addresses
                .Where(x => x.CustomerID == thisCustomer.CustomerID)
                .ToHashSet();

            userDatas.Add(new UserData
            {
                Id = thisCustomer.CustomerID,
                Email = thisCustomer.Email,
                Salt = thisCustomer.SaltKey,
                Hash = thisCustomer.PasswordHash,
                Active = thisCustomer.Active,
                Format = "NopSHA1",
                Guid = thisCustomer.CustomerGUID,
                FirstName = customerAttributes.ContainsKey(FirstNameKey) ? customerAttributes[FirstNameKey] : string.Empty,
                LastName = customerAttributes.ContainsKey(LastNameKey) ? customerAttributes[LastNameKey] : string.Empty,
                AddressData = customerAddresses
                    .Select(address => GetAddressDataObject(address, stateProvinces, countries))
                    .ToHashSet()
            });
        }

        return userDatas;
    }
}
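The two-level lookup above (customer id, then attribute key, first value wins) is a reusable GroupBy pattern. A minimal sketch with a hypothetical AttributeRow record (C# 9) in place of Nop190_CustomerAttribute:

using System.Collections.Generic;
using System.Linq;

// Hypothetical flattened attribute row standing in for Nop190_CustomerAttribute.
public record AttributeRow(int CustomerId, string Key, string Value);

public static class AttributeLookup
{
    // customer id -> (attribute key -> first value seen); duplicates per key are dropped.
    public static Dictionary<int, Dictionary<string, string>> Build(IEnumerable<AttributeRow> rows)
    {
        return rows
            .GroupBy(r => r.CustomerId)
            .ToDictionary(
                byCustomer => byCustomer.Key,
                byCustomer => byCustomer
                    .GroupBy(r => r.Key)
                    .ToDictionary(byKey => byKey.Key, byKey => byKey.First().Value));
    }
}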