/// <summary>
/// Runs the prepare step and builds the <see cref="BuildResult"/> for it.
/// When <paramref name="forceRebuild"/> is true, artifacts that this build will
/// produce are first purged from the local caches and the target feed.
/// </summary>
/// <param name="m">The monitor to use.</param>
/// <param name="forceRebuild">True to purge already produced artifacts before building.</param>
/// <returns>The build result, or null on failure.</returns>
BuildResult CreateResultByPreparingBuilds(IActivityMonitor m, bool forceRebuild)
{
    using (m.OpenInfo("Preparing builds."))
    {
        if (!RunPrepareBuild(m))
        {
            m.CloseGroup("Failed.");
            return null;
        }
        BuildResult result = CreateBuildResult(m);
        if (result == null) return null;
        if (forceRebuild)
        {
            using (m.OpenInfo("Forcing rebuild: Removing already existing artifacts that will be produced from caches."))
            {
                // Same artifact set is removed from all caches and from the typed feed.
                var produced = result.GeneratedArtifacts.Select(g => g.Artifact);
                _localFeedProvider.RemoveFromAllCaches(m, produced);
                _localFeedProvider.GetFeed(_type).Remove(m, produced);
            }
        }
        m.CloseGroup("Success.");
        return result;
    }
}
/// <summary>
/// forceBuggyRemove is not used here since this client is not lockable.
/// </summary>
void IActivityMonitorBoundClient.SetMonitor(Impl.IActivityMonitorImpl source, bool forceBuggyRemove)
{
    // A bound client can only be registered on one monitor at a time:
    // receiving a non-null source while one is already set is a caller error.
    if (source != null && _source != null)
    {
        throw ActivityMonitorClient.CreateMultipleRegisterOnBoundClientException(this);
    }
    if (_source != null)
    {
        _bridgeTarget.RemoveCallback(this);
        // Unregistering.
        // Any group this bridge opened on the target must be closed now,
        // otherwise the target monitor would be left with dangling groups.
        for (int i = 0; i < _openedGroups.Count; ++i)
        {
            if (_openedGroups[i])
            {
                _targetMonitor.CloseGroup(new ActivityLogGroupConclusion(ActivityMonitorResources.ClosedByBridgeRemoved, TagBridgePrematureClose));
            }
        }
        _openedGroups.Clear();
    }
    else
    {
        // Registering: hook the callback and capture the target's current final filter.
        _bridgeTarget.AddCallback(this);
        _targetActualFilter = _bridgeTarget.TargetFinalFilter;
        if (_pullTargetTopicAndAutoTagsFromTarget)
        {
            source.InitializeTopicAndAutoTags(this._targetMonitor.Topic, _targetMonitor.AutoTags);
        }
    }
    _source = source;
    // Publishes _source (and the state above) to other threads.
    Interlocked.MemoryBarrier();
}
/// <summary>
/// Launches one or more dependent activities (thanks to a delegate) that will use the current monitor's topic.
/// A new <see cref="ActivityMonitor.DependentToken"/> is created and a group wraps the execution
/// of the <paramref name="dependentLauncher"/>.
/// </summary>
/// <param name="dependentLauncher">Must create and launch dependent activities that should use the created token.</param>
public void Launch(Action<ActivityMonitor.DependentToken> dependentLauncher)
{
    if (dependentLauncher == null) throw new ArgumentNullException(nameof(dependentLauncher));
    // The token carries this monitor's topic; msg describes the creation and
    // becomes the text of the wrapping group.
    var token = ActivityMonitor.DependentToken.CreateWithMonitorTopic(_monitor, true, out string msg);
    using (_monitor.UnfilteredOpenGroup(ActivityMonitor.Tags.CreateDependentActivity, LogLevel.Info, null, msg, token.CreationDate, null, _fileName, _lineNumber))
    {
        dependentLauncher(token);
        _monitor.CloseGroup(_monitor.NextLogTime(), "Success.");
    }
}
/// <summary>
/// Stress-test worker: emits a randomized mix of filter toggles, log lines,
/// group openings and group closings on this context's monitor.
/// </summary>
public void Run()
{
    _monitor.Info().Send("ThreadContext{0}Begin", NumMonitor);
    // Record the executing thread id on every buggy client attached to the monitor.
    foreach (var client in _monitor.Output.Clients.OfType<BuggyClient>())
    {
        client.RunThreadId = Thread.CurrentThread.ManagedThreadId;
    }
    for (int i = 0; i < OperationCount; ++i)
    {
        double dice = Rand.NextDouble();
        // ~1/60 of the time: flip the minimal filter between Debug and Verbose.
        // Note: this is NOT exclusive with the operation below (same roll is reused).
        if (dice < 1.0 / 60)
        {
            _monitor.MinimalFilter = _monitor.MinimalFilter == LogFilter.Debug ? LogFilter.Verbose : LogFilter.Debug;
        }
        if (dice < 1.0 / 3)
        {
            _monitor.Info().Send("OP-{0}-{1}", NumMonitor, i);
        }
        else if (dice < 2.0 / 3)
        {
            _monitor.OpenInfo().Send("G-OP-{0}-{1}", NumMonitor, i);
        }
        else
        {
            _monitor.CloseGroup();
        }
    }
    _monitor.Info().Send("ThreadContext{0}End", NumMonitor);
}
/// <summary>
/// Prepares each solution in dependency order: resolves the package upgrades implied
/// by previously prepared solutions, computes the target version and records the
/// generated artifacts' versions for the solutions that follow.
/// Returns false as soon as a solution's version cannot be computed.
/// </summary>
bool RunPrepareBuild(IActivityMonitor m)
{
    // Required for DevelopBuilder retry.
    _packagesVersion.Clear();
    var solutionAndDrivers = DependentSolutionContext.Solutions;
    for (int i = 0; i < solutionAndDrivers.Count; ++i)
    {
        var s = solutionAndDrivers[i];
        // _packagesVersion grows as the loop proceeds: imported local packages of this
        // solution must have been produced (and versioned) by an earlier iteration.
        var upgrades = s.Solution.ImportedLocalPackages
                        .Select(p => new UpdatePackageInfo(
                                        p.Project,
                                        p.Package.Artifact.Type,
                                        p.Package.Artifact.Name,
                                        _packagesVersion[p.Package.Artifact]))
                        .ToList();
        _upgrades[i] = upgrades;
        using (m.OpenInfo($"Preparing {s} build."))
        {
            var pr = PrepareBuild(m, s.Solution, s.Driver, upgrades);
            ZeroBuilder.RegisterSHAlias(m);
            if (pr.Version == null)
            {
                return (false);
            }
            m.CloseGroup($"Target version: {pr.Version}{(pr.MustBuild ? "" : " (no build required)")}");
            // Null target version marks "no build needed" for this solution.
            _targetVersions[i] = pr.MustBuild ? pr.Version : null;
            // Publish this solution's generated artifact versions for downstream solutions.
            _packagesVersion.AddRange(s.Solution.Solution.GeneratedArtifacts.Select(p => new KeyValuePair<Artifact, SVersion>(p.Artifact, pr.Version)));
        }
    }
    return (true);
}
/// <summary>
/// Pump loop: reads serialized log entries from the pipe reader and replays them
/// into the local monitor until end-of-stream (clean or not) or an exception.
/// Sets _endFlag to describe how the stream ended.
/// </summary>
void Run()
{
    try
    {
        int streamVersion = _reader.ReadInt32();
        if (_interProcess)
        {
            // Our copy of the client handle must be released so that the server
            // can detect the client's disconnection.
            _server.DisposeLocalCopyOfClientHandle();
        }
        for (; ;)
        {
            var e = LogEntry.Read(_reader, streamVersion, out bool badEndOfStream);
            if (e == null || badEndOfStream)
            {
                _endFlag = badEndOfStream ? LogReceiverEndStatus.MissingEndMarker : LogReceiverEndStatus.Normal;
                break;
            }
            switch (e.LogType)
            {
                case LogEntryType.Line:
                {
                    // Lines can simply be skipped when filtered out.
                    if (_monitor.ShouldLogLine(e.LogLevel, e.Tags, out var finalTags))
                    {
                        var d = new ActivityMonitorLogData(e.LogLevel | LogLevel.IsFiltered, finalTags, e.Text, CKException.CreateFrom(e.Exception), e.FileName, e.LineNumber);
                        d.SetExplicitLogTime(e.LogTime);
                        _monitor.UnfilteredLog(ref d);
                    }
                    break;
                }
                case LogEntryType.OpenGroup:
                {
                    // A group must ALWAYS be opened (with default data when filtered out)
                    // so that the matching CloseGroup entries stay balanced.
                    ActivityMonitorLogData d;
                    if (_monitor.ShouldLogLine(e.LogLevel, e.Tags, out var finalTags))
                    {
                        d = new ActivityMonitorLogData(e.LogLevel | LogLevel.IsFiltered, finalTags, e.Text, CKException.CreateFrom(e.Exception), e.FileName, e.LineNumber);
                        d.SetExplicitLogTime(e.LogTime);
                    }
                    else
                    {
                        d = default;
                    }
                    _monitor.UnfilteredOpenGroup(ref d);
                }
                break;
                case LogEntryType.CloseGroup:
                    _monitor.CloseGroup(e.Conclusions, e.LogTime);
                    break;
            }
        }
    }
    catch (Exception ex)
    {
        // Any read/replay failure ends the pump with an Error status.
        _endFlag = LogReceiverEndStatus.Error;
        _monitor.UnfilteredLog(LogLevel.Fatal, null, "While receiving pipe logs.", ex);
    }
}
/// <summary>
/// Demo: the group disposable is captured in a local before the using;
/// the explicit CloseGroup supplies the conclusion.
/// </summary>
void DemoOpenGroupThisWorksFine(IActivityMonitor m)
{
    var group = m.OpenInfo().Send("Doing things...");
    using (group)
    {
        // ...
        m.CloseGroup("Success.");
    }
}
/// <summary>
/// Demo: opening an info group inside a using and concluding it explicitly.
/// The dispose at the end of the using is then a no-op (the group is already closed).
/// </summary>
void DemoOpenGroupThisWorksFine( IActivityMonitor m )
{
    IDisposable g = m.OpenInfo().Send( "Doing things..." );
    using( g )
    {
        // ...
        m.CloseGroup( "Success." );
    }
}
/// <summary>
/// Reads the SourceOverrideFilter configuration element into a map from source
/// file name to <see cref="LogFilter"/> and outputs the ApplyMode.
/// Returns an empty dictionary when no SourceOverrideFilter element exists,
/// or null when the element is malformed (the error is logged, not rethrown).
/// </summary>
static Dictionary<string, LogFilter> ReadSourceOverrideFilter(XElement e, out SourceFilterApplyMode apply, IActivityMonitor monitor)
{
    apply = SourceFilterApplyMode.None;
    using (monitor.OpenGroup(LogLevel.Trace, "Reading SourceOverrideFilter elements.", null))
    {
        try
        {
            var s = e.Element("SourceOverrideFilter");
            if (s == null)
            {
                monitor.CloseGroup("No source filtering (ApplyMode is None).");
                return (new Dictionary<string, LogFilter>());
            }
            apply = s.AttributeEnum("ApplyMode", SourceFilterApplyMode.Apply);
            // Only Add and Remove children are allowed.
            var stranger = e.Elements("SourceOverrideFilter").Elements().FirstOrDefault(f => f.Name != "Add" && f.Name != "Remove");
            if (stranger != null)
            {
                throw new XmlException("SourceOverrideFilter element must contain only Add and Remove elements." + stranger.GetLineColumnString());
            }
            // Add requires an explicit Filter attribute; Remove maps to LogFilter.Undefined.
            var result = e.Elements("SourceOverrideFilter")
                          .Elements()
                          .Select(f => new { File = f.AttributeRequired("File"), Filter = f.Name == "Add" ? f.GetRequiredAttributeLogFilter("Filter") : (LogFilter?)LogFilter.Undefined })
                          .Where(f => !String.IsNullOrWhiteSpace(f.File.Value))
                          .ToDictionary(f => f.File.Value, f => f.Filter.Value);
            monitor.CloseGroup(String.Format("{0} source files, ApplyMode is {1}.", result.Count, apply));
            return (result);
        }
        catch (Exception ex)
        {
            // NOTE(review): errors are swallowed here and null is returned —
            // callers must handle a null map.
            monitor.SendLine(LogLevel.Error, "Error while reading SourceOverrideFilter element.", ex);
            return (null);
        }
    }
}
/// <summary>
/// Demo: a dynamic conclusion delegate is attached to the group; it is evaluated
/// when the group closes and is appended to the explicit CloseGroup conclusion.
/// </summary>
void DemoOpenGroupWithDynamicConclusion( IActivityMonitor m )
{
    int nbProcessed = 0;
    // Evaluated at close time, so it observes the final counter value.
    Func<string> dynamicConclusion = () => String.Format( "{0} files.", nbProcessed );
    using( m.OpenInfo().Send( "Doing things..." ).ConcludeWith( dynamicConclusion ) )
    {
        // ...
        nbProcessed += 21;
        m.CloseGroup( "Success." );
        // The user Group conclusion is: "Success. - 21 files." (the two conclusions are concatenated).
    }
}
/// <summary>
/// Demo: the group is captured in a local, then a dynamic conclusion is attached;
/// the delegate runs when the group closes and is concatenated with the explicit conclusion.
/// </summary>
void DemoOpenGroupWithDynamicConclusion(IActivityMonitor m)
{
    int nbProcessed = 0;
    var group = m.OpenInfo().Send("Doing things...");
    using (group.ConcludeWith(() => String.Format("{0} files.", nbProcessed)))
    {
        // ...
        nbProcessed += 21;
        m.CloseGroup("Success.");
        // The user Group conclusion is: "Success. - 21 files." (the two conclusions are concatenated).
    }
}
/// <summary>
/// Indexes every generic authentication provider by its (case-insensitive) name,
/// adds the basic provider through its generic adapter when available, and exposes
/// a read-only view of the provider collection.
/// </summary>
void StObjInitialize(IActivityMonitor m, IContextualStObjMap map)
{
    using (m.OpenInfo($"Initializing CK.DB.Auth.Package : IAuthenticationDatabaseService"))
    {
        _allProviders = map.Implementations
                           .OfType<IGenericAuthenticationProvider>()
                           .ToDictionary(p => p.ProviderName, StringComparer.OrdinalIgnoreCase);
        if (BasicProvider != null)
        {
            // The basic provider participates through its generic adapter.
            var adapter = new BasicToGenericProviderAdapter(BasicProvider);
            _allProviders.Add(BasicToGenericProviderAdapter.Name, adapter);
        }
        _allProvidersValues = new CKReadOnlyCollectionOnICollection<IGenericAuthenticationProvider>(_allProviders.Values);
        m.CloseGroup($"{_allProviders.Count} providers: " + _allProviders.Keys.Concatenate());
    }
}
/// <summary>
/// Runs the build. Orchestrates calls to <see cref="PrepareBuild"/> and <see cref="Build"/>.
/// </summary>
/// <param name="m">The monitor to use.</param>
/// <param name="forceRebuild">True to remove already produced artifacts so everything is rebuilt.</param>
/// <returns>The BuildResult on success, null on error.</returns>
public BuildResult Run(IActivityMonitor m, bool forceRebuild)
{
    // Run must not be called twice on the same builder:
    // _packagesVersion is populated by the preparation step.
    if (_packagesVersion.Count > 0)
    {
        throw new InvalidOperationException();
    }
    BuildResult result = CreateResultByPreparingBuilds(m, forceRebuild);
    if (result == null)
    {
        return (null);
    }
    using (m.OpenInfo("Running builds."))
    {
        BuildState state = RunBuild(m);
        if (state == BuildState.Failed)
        {
            m.CloseGroup("Failed.");
            return (null);
        }
        if (state == BuildState.Succeed)
        {
            m.CloseGroup("Success.");
        }
        else
        {
            // This is not really optimal but it is required only for DevelopBuilder
            // (where version numbers are not fully known upfront AND commit may not be
            // amendable (when on a fresh checkout).
            // We may have resolved and applied the buildUpgrades in a tight dedicated loop
            // but we would need to do exactly the same code than the DevelopBuilder.RunBuild does:
            // applying build upgrades, checks the commit, then calls ReadCommitVersionInfo on actual
            // (non amended commits), applies this new version to any previous solution and start again...
            // This is exactly what this whole code does thanks to the BuildState.MustRetry.
            using (m.OpenInfo("Retrying running builds."))
            {
                do
                {
                    // NOTE(review): result is not null-checked before RunBuild here —
                    // confirm CreateResultByPreparingBuilds cannot fail during a retry.
                    result = CreateResultByPreparingBuilds(m, forceRebuild);
                    state = RunBuild(m);
                }
                while (state == BuildState.MustRetry);
                if (state == BuildState.Failed)
                {
                    // Two groups are open here: the retry group and the outer "Running builds." group.
                    m.CloseGroup("Retry failed.");
                    m.CloseGroup("Failed (with retries).");
                    return (null);
                }
                if (state == BuildState.Succeed)
                {
                    m.CloseGroup("Retry succeed.");
                    m.CloseGroup("Success (with retries).");
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Pushes the given local artifacts to the target repository through every
/// registered type handler. All handlers are attempted even after a failure.
/// </summary>
/// <returns>True when every handler succeeded, false otherwise.</returns>
public bool PushLocalArtifacts(IActivityMonitor m, IArtifactRepository target, IEnumerable<ArtifactInstance> artifacts, bool arePublicArtifacts)
{
    bool allSucceeded = true;
    foreach (var handler in _provider._handlers)
    {
        using (m.OpenTrace($"Pushing for type handler '{handler}'."))
        {
            bool pushed = handler.PushLocalArtifacts(this, m, target, artifacts, arePublicArtifacts);
            if (!pushed)
            {
                m.CloseGroup("Failed.");
                allSucceeded = false;
            }
        }
    }
    return allSucceeded;
}
/// <summary>
/// Opens the key vault.
/// </summary>
/// <param name="m">The monitor to use.</param>
/// <param name="passPhrase">The key vault pass phrase.</param>
/// <returns>True on success.</returns>
public bool OpenKeyVault(IActivityMonitor m, string passPhrase)
{
    if (!CheckPassPhraseConstraints(m, passPhrase))
    {
        return false;
    }
    if (_passPhrase != null)
    {
        m.Info($"Key Vault is already opened.");
        return true;
    }
    if (KeyVaultFileExists)
    {
        // Tracks whether the info group has been opened so it can be closed on
        // the error path (fix: it was previously left open when ImportSecretKeys threw).
        bool groupOpened = false;
        try
        {
            var keys = KeyVault.DecryptValues(File.ReadAllText(KeyVaultPath), passPhrase);
            m.OpenInfo($"Opening existing Key Vault with keys: {keys.Keys.Concatenate()}.");
            groupOpened = true;
            _store.ImportSecretKeys(m, keys);
            _passPhrase = passPhrase;
            _vaultContent.Clear();
            _vaultContent.AddRange(keys);
        }
        catch (Exception ex)
        {
            // A failed decryption (typically a bad pass phrase) throws before the
            // group is opened: only close it when it actually was.
            m.Error("Unable to open the key vault.", ex);
            if (groupOpened)
            {
                m.CloseGroup();
            }
            return false;
        }
    }
    else
    {
        _passPhrase = passPhrase;
        m.OpenInfo($"New Key Vault opened.");
    }
    if (_store.Infos.Any(s => !s.IsSecretAvailable))
    {
        using (m.OpenWarn($"Missing secrets:"))
        {
            foreach (var s in _store.Infos.Where(s => !s.IsSecretAvailable))
            {
                m.Warn(s.ToString());
            }
        }
    }
    // Closes the "Opening existing Key Vault..." or "New Key Vault opened." group.
    m.CloseGroup();
    return true;
}
/// <summary>
/// Replays this monitor's content into another monitor.
/// </summary>
/// <param name="replay">The target monitor. Can not be null.</param>
/// <param name="monitor">Optional monitor (nothing is logged when null).</param>
public void Replay(IActivityMonitor replay, IActivityMonitor?monitor = null)
{
    using (monitor?.OpenInfo($"Replaying activity from '{MonitorId}'."))
    {
        int nbMissing = 0;
        int nbTotal = 0;
        using (var page = ReadFirstPage(1024))
        {
            foreach (ParentedLogEntry e in page.Entries)
            {
                ++nbTotal;
                LogLevel level = e.Entry.LogLevel;
                // Missing entries are replayed as Trace placeholders.
                if (e.IsMissing)
                {
                    ++nbMissing;
                    level = LogLevel.Trace;
                }
                switch (e.Entry.LogType)
                {
                    case LogEntryType.Line:
                        var d = new ActivityMonitorLogData(level, e.Entry.Tags, e.Entry.Text, CKException.CreateFrom(e.Entry.Exception), e.Entry.FileName, e.Entry.LineNumber);
                        d.SetExplicitLogTime(e.Entry.LogTime);
                        replay.UnfilteredLog(ref d);
                        break;
                    case LogEntryType.OpenGroup:
                        d = new ActivityMonitorLogData(level, e.Entry.Tags, e.Entry.Text, CKException.CreateFrom(e.Entry.Exception), e.Entry.FileName, e.Entry.LineNumber);
                        d.SetExplicitLogTime(e.Entry.LogTime);
                        replay.UnfilteredOpenGroup(ref d);
                        break;
                    case LogEntryType.CloseGroup:
                        replay.CloseGroup(e.Entry.Conclusions, e.Entry.LogTime);
                        break;
                }
            }
            // NOTE(review): ForwardPage's result is ignored and the loop body runs only
            // once — confirm whether a do/while over subsequent pages was intended
            // (as written, only the first 1024 entries appear to be replayed).
            page.ForwardPage();
        }
        monitor?.CloseGroup($"Replayed {nbTotal} entries ({nbMissing} missing).");
    }
}
/// <summary>
/// Reads all *.ckmon files of a directory and replays each discovered monitor's
/// activity into the monitor obtained from <paramref name="monitorProvider"/>
/// (a null provider result skips that monitor). Logging via <paramref name="m"/> is optional.
/// </summary>
public static void ReplayLogs(DirectoryInfo directory, bool recurse, Func<MultiLogReader.Monitor, ActivityMonitor> monitorProvider, IActivityMonitor m = null)
{
    var reader = new MultiLogReader();
    using (m != null ? m.OpenTrace().Send("Reading files from '{0}' {1}.", directory.FullName, recurse ? "(recursive)" : null) : null)
    {
        var files = reader.Add(directory.EnumerateFiles("*.ckmon", recurse ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly).Select(f => f.FullName));
        if (files.Count == 0)
        {
            if (m != null)
            {
                m.Warn().Send("No *.ckmon files found!");
            }
        }
        else
        {
            var monitors = reader.GetActivityMap().Monitors;
            if (m != null)
            {
                m.Trace().Send(String.Join(Environment.NewLine, files));
                // Closes the "Reading files..." group early with its conclusion...
                m.CloseGroup(String.Format("Found {0} file(s) containing {1} monitor(s).", files.Count, monitors.Count));
                // ...and opens a new group for the extraction phase.
                // NOTE(review): this group has no explicit CloseGroup; confirm it is
                // properly closed when the outer using disposes.
                m.OpenTrace().Send("Extracting entries.");
            }
            foreach (var mon in monitors)
            {
                var replay = monitorProvider(mon);
                if (replay == null)
                {
                    if (m != null)
                    {
                        m.Info().Send("Skipping activity from '{0}'.", mon.MonitorId);
                    }
                }
                else
                {
                    mon.Replay(replay, m);
                }
            }
        }
    }
}
/// <summary>
/// Performs the final service registration: maps every root class, then maps each
/// interface of every family onto the family's resolved class.
/// </summary>
/// <returns>True when all registrations succeeded.</returns>
public bool FinalRegistration(AutoServiceCollectorResult typeResult, IEnumerable<InterfaceFamily> families)
{
    using (_monitor.OpenInfo("Final Service registration."))
    {
        bool success = true;
        // Root classes first.
        foreach (var rootClass in typeResult.RootClasses)
        {
            RegisterClassMapping(rootClass, ref success);
        }
        // Then every interface of each family maps to the family's resolved class.
        foreach (var family in families)
        {
            Debug.Assert(family.Resolved != null);
            foreach (var itf in family.Interfaces)
            {
                RegisterMapping(itf.Type, family.Resolved, ref success);
            }
        }
        _monitor.CloseGroup($"Registered {_engineMap.ObjectMappings.Count} object mappings, {_engineMap.SimpleMappings.Count} simple mappings and {_engineMap.ManualMappingList.Count} factories for {_engineMap.ManualMappings.Count} manual mappings.");
        return success;
    }
}
/// <summary>
/// Replays this monitor's content into another monitor.
/// </summary>
/// <param name="replay">The target monitor. Can not be null.</param>
/// <param name="m">Optional monitor (nothing is logged when null).</param>
public void Replay(IActivityMonitor replay, IActivityMonitor m = null)
{
    using (m != null ? m.OpenGroup(LogLevel.Info, string.Format("Replaying activity from '{0}'.", MonitorId), null) : null)
    {
        int nbMissing = 0;
        int nbTotal = 0;
        using (var page = ReadFirstPage(1024))
        {
            foreach (ParentedLogEntry e in page.Entries)
            {
                ++nbTotal;
                LogLevel level = e.Entry.LogLevel;
                // Missing entries are replayed as Trace placeholders.
                if (e.IsMissing)
                {
                    ++nbMissing;
                    level = LogLevel.Trace;
                }
                switch (e.Entry.LogType)
                {
                    case LogEntryType.Line:
                        replay.UnfilteredLog(e.Entry.Tags, level, e.Entry.Text, e.Entry.LogTime, CKException.CreateFrom(e.Entry.Exception), e.Entry.FileName, e.Entry.LineNumber);
                        break;
                    case LogEntryType.OpenGroup:
                        replay.UnfilteredOpenGroup(e.Entry.Tags, level, null, e.Entry.Text, e.Entry.LogTime, CKException.CreateFrom(e.Entry.Exception), e.Entry.FileName, e.Entry.LineNumber);
                        break;
                    case LogEntryType.CloseGroup:
                        replay.CloseGroup(e.Entry.LogTime, e.Entry.Conclusions);
                        break;
                }
            }
            // NOTE(review): ForwardPage's result is ignored and the loop body runs only
            // once — confirm whether a do/while over subsequent pages was intended.
            page.ForwardPage();
        }
        if (m != null)
        {
            m.CloseGroup(String.Format("Replayed {0} entries ({1} missing).", nbTotal, nbMissing));
        }
    }
}
/// <summary>
/// Demo of the log API: lines, nested groups, an error with exception,
/// an explicit CloseGroup and a manually disposed group.
/// </summary>
void LogDemo(IActivityMonitor m)
{
    m.Info("This is an info.");
    using (m.OpenInfo($"This is an info group."))
    {
        m.Fatal($"Ouch! a faaaaatal.");
        // This trace group is never closed explicitly: disposing the outer
        // using closes it along with everything nested.
        m.OpenTrace($"A trace");
        var g = m.OpenInfo($"This is another group (trace).");
        try
        {
            throw new Exception();
        }
        catch (Exception ex)
        {
            m.Error("An error occurred.", ex);
        }
        // Closes the innermost open group (g) with a conclusion;
        // the Dispose below is then a no-op.
        m.CloseGroup("This is a close group.");
        g.Dispose();
    }
}
/// <summary>
/// Registers types from multiple assemblies.
/// Only classes and IPoco interfaces are considered.
/// Once the first type is registered, no more call to <see cref="SetAutoServiceKind(Type, AutoServiceKind)"/> is allowed.
/// </summary>
/// <param name="assemblyNames">The assembly names to register.</param>
/// <returns>The number of new discovered classes.</returns>
public int RegisterAssemblyTypes(IReadOnlyCollection<string> assemblyNames)
{
    if (assemblyNames == null)
    {
        throw new ArgumentNullException(nameof(assemblyNames));
    }
    int totalRegistered = 0;
    // Every error logged below is also counted as a fatal/error registration.
    using (_monitor.OnError(() => ++_registerFatalOrErrorCount))
        using (_monitor.OpenTrace($"Registering {assemblyNames.Count} assemblies."))
        {
            foreach (var one in assemblyNames)
            {
                using (_monitor.OpenTrace($"Registering assembly '{one}'."))
                {
                    Assembly?a = null;
                    try
                    {
                        a = Assembly.Load(one);
                    }
                    catch (Exception ex)
                    {
                        // A load failure is logged but does not abort the other assemblies.
                        _monitor.Error($"Error while loading assembly '{one}'.", ex);
                    }
                    if (a != null)
                    {
                        int nbAlready = _cc.RegisteredTypeCount;
                        _cc.RegisterTypes(a.GetTypes());
                        int delta = _cc.RegisteredTypeCount - nbAlready;
                        // Fix: message previously read "types(s)".
                        _monitor.CloseGroup($"{delta} type(s) registered.");
                        totalRegistered += delta;
                    }
                }
            }
        }
    return (totalRegistered);
}
/// <summary>
/// Reads the SourceOverrideFilter configuration element into a map from source file
/// name to <see cref="LogFilter"/> and outputs the ApplyMode.
/// Returns an empty dictionary when no SourceOverrideFilter element exists,
/// or null when the element is malformed (the error is logged, not rethrown).
/// </summary>
static Dictionary<string, LogFilter> ReadSourceOverrideFilter( XElement e, out SourceFilterApplyMode apply, IActivityMonitor monitor )
{
    apply = SourceFilterApplyMode.None;
    using( monitor.OpenGroup( LogLevel.Trace, "Reading SourceOverrideFilter elements.", null ) )
    {
        try
        {
            var s = e.Element( "SourceOverrideFilter" );
            if( s == null )
            {
                monitor.CloseGroup( "No source filtering (ApplyMode is None)." );
                return new Dictionary<string, LogFilter>();
            }
            apply = s.AttributeEnum( "ApplyMode", SourceFilterApplyMode.Apply );
            // Only Add and Remove children are allowed.
            var stranger = e.Elements( "SourceOverrideFilter" ).Elements().FirstOrDefault( f => f.Name != "Add" && f.Name != "Remove" );
            if( stranger != null )
            {
                throw new XmlException( "SourceOverrideFilter element must contain only Add and Remove elements." + stranger.GetLineColumnString() );
            }
            // Add requires an explicit Filter attribute; Remove maps to LogFilter.Undefined.
            var result = e.Elements( "SourceOverrideFilter" )
                            .Elements()
                            .Select( f => new { File = f.AttributeRequired( "File" ), Filter = f.Name == "Add" ? f.GetRequiredAttributeLogFilter( "Filter" ) : (LogFilter?)LogFilter.Undefined } )
                            .Where( f => !String.IsNullOrWhiteSpace( f.File.Value ) )
                            .ToDictionary( f => f.File.Value, f => f.Filter.Value );
            monitor.CloseGroup( String.Format( "{0} source files, ApplyMode is {1}.", result.Count, apply ) );
            return result;
        }
        catch( Exception ex )
        {
            // NOTE(review): errors are swallowed here and null is returned —
            // callers must handle a null map.
            monitor.SendLine( LogLevel.Error, "Error while reading SourceOverrideFilter element.", ex );
            return null;
        }
    }
}
/// <summary>
/// Demo (deliberately imperfect): the group is opened without a using, so if the
/// code between open and close threw, the group would never be closed.
/// </summary>
void DemoOpenGroupFarFromPerfect(IActivityMonitor m)
{
    // The returned disposable is intentionally discarded here.
    m.OpenInfo().Send("Doing things...");
    // ... work happens here, unprotected ...
    m.CloseGroup("Success.");
}
/// <summary>
/// Dumps the state of a set of git folders (branch, ahead-count, plugin status,
/// cleanliness) and summarizes dirty folders and branch distribution.
/// Returns false when at least one folder failed to initialize its plugins.
/// </summary>
bool DumpGitFolders(IActivityMonitor m, IEnumerable<GitFolder> gitFolders)
{
    bool isLogFilterDefault = false;
    LogFilter final = m.ActualFilter;
    if (final == LogFilter.Undefined)
    {
        final = ActivityMonitor.DefaultFilter;
        isLogFilterDefault = true;
    }
    // NOTE(review): "User:'******'" looks like a redacted or lost interpolation
    // (presumably the user-level filter) — confirm against the original source.
    var msg = $"Monitor filters: User:'******' => Final:'{final}'{(isLogFilterDefault ? "(AppDomain's default)" : "")}.";
    m.UnfilteredLog(ActivityMonitor.Tags.Empty, LogLevel.Info, msg, m.NextLogTime(), null);
    int gitFoldersCount = 0;
    bool hasPluginInitError = false;
    var dirty = new List<string>();
    foreach (var git in gitFolders)
    {
        ++gitFoldersCount;
        string commitAhead = git.Head.AheadOriginCommitCount != null ? $"{git.Head.AheadOriginCommitCount} commits ahead origin" : "Untracked";
        using (m.OpenInfo($"{git.SubPath} - branch: {git.CurrentBranchName} ({commitAhead})."))
        {
            string pluginInfo;
            if (!git.EnsureCurrentBranchPlugins(m))
            {
                hasPluginInitError = true;
                pluginInfo = "Plugin initialization error.";
            }
            else
            {
                pluginInfo = $"({git.PluginManager.BranchPlugins[git.CurrentBranchName].Count} plugins)";
            }
            if (git.CheckCleanCommit(m))
            {
                m.CloseGroup("Up-to-date. " + pluginInfo);
            }
            else
            {
                dirty.Add(git.SubPath);
                m.CloseGroup("Dirty. " + pluginInfo);
            }
        }
    }
    if (gitFoldersCount == 0)
    {
        m.Error("No git folder found.");
    }
    else
    {
        // NOTE(review): no group is open at this level — this CloseGroup targets a
        // group opened by the caller; confirm the intended nesting.
        m.CloseGroup($"{dirty.Count} dirty (out of {gitFoldersCount}).");
        if (dirty.Count > 0)
        {
            m.Info($"Dirty: {dirty.Concatenate()}");
        }
        var byActiveBranch = gitFolders.GroupBy(g => g.CurrentBranchName);
        if (byActiveBranch.Count() > 1)
        {
            using (m.OpenWarn($"{byActiveBranch.Count()} different branches:"))
            {
                foreach (var b in byActiveBranch)
                {
                    using (m.OpenWarn($"Branch '{b.Key}':"))
                    {
                        m.Warn(b.Select(g => g.SubPath.Path).Concatenate());
                    }
                }
            }
        }
        else
        {
            m.Info($"All {gitFoldersCount} git folders are on '{byActiveBranch.First().Key}' branch.");
        }
        if (hasPluginInitError)
        {
            m.Error("At least one git folder is unable to initialize its plugins.");
        }
    }
    return (!hasPluginInitError);
}
/// <summary>
/// Finalizes the code generation.
/// Idempotent: the first call's outcome is cached in _finalizedCall and returned on
/// subsequent calls.
/// </summary>
/// <param name="monitor">The monitor to use.</param>
/// <returns>True on success, false on error.</returns>
public bool FinalizeCodeGeneration(IActivityMonitor monitor)
{
    if (_finalizedCall.HasValue)
    {
        return (_finalizedCall.Value);
    }
    using (monitor.OpenInfo($"Generating Json serialization with {_map.Count} mappings to {_typeInfos.Count} types."))
    {
        int missingCount = 0;
        // First pass: for every type lacking a reader or writer, give subscribers a
        // chance to configure it via the TypeInfoConfigurationRequired event.
        foreach (var t in _typeInfos)
        {
            if (t.CodeReader == null || t.CodeWriter == null)
            {
                ++missingCount;
                using (_monitor.OpenTrace($"Missing CodeReader/Writer for '{t.NonNullableJsonName}'. Raising TypeInfoConfigurationRequired."))
                {
                    try
                    {
                        TypeInfoConfigurationRequired?.Invoke(this, new TypeInfoConfigurationRequiredEventArg(_monitor, this, t));
                    }
                    catch (Exception ex)
                    {
                        _monitor.Error($"While raising TypeInfoConfigurationRequired for '{t.NonNullableJsonName}'.", ex);
                        _finalizedCall = false;
                        return (false);
                    }
                }
            }
        }
        if (missingCount > 0)
        {
            // Let the TypeInfo be configured in any order (the event for Z may have configured A and Z together).
            var missing = _typeInfos.Where(i => i.CodeWriter == null || i.CodeReader == null).ToList();
            if (missing.Count > 0)
            {
                _monitor.Error($"Missing Json CodeReader/Writer functions for types '{missing.Select( m => m.NonNullableJsonName ).Concatenate( "', '" )}'.");
                _finalizedCall = false;
                return (false);
            }
        }
        // Generates the code for "dynamic"/"untyped" object.
        // Writing must handle the object instance to write. Null reference/value type can be handled immediately (by writing "null").
        // When not null, we are dealing only with concrete types here: the object MUST be of an allowed concrete type, an abstraction
        // that wouldn't be one of the allowed concrete type must NOT be handled!
        // That's why we can use a direct pattern matching on the object's type for the write method (reference types are ordered from specializations
        // to generalization) and we use the GenericWriteHandler to remove NRT duplicates.
        GenerateDynamicWrite(_typeInfos);
        // Reading must handle the [TypeName,...] array: it needs a lookup from the "type name" to the handler to use: this is the goal of
        // the _typeReaders dictionary that we initialize here (no concurrency issue, no lock to generate: once built the dictionary will only
        // be read).
        GenerateDynamicRead();
        using (monitor.OpenTrace("Raising JsonTypeFinalized event."))
        {
            // message tracks which phase failed when an exception is thrown below.
            string message = "While raising JsonTypeFinalized.";
            try
            {
                JsonTypeFinalized?.Invoke(this, new EventMonitoredArgs(monitor));
                message = "While executing deferred actions to GenerateRead/Write code.";
                foreach (var a in _finalReadWrite)
                {
                    a(monitor);
                }
            }
            catch (Exception ex)
            {
                _monitor.Error(message, ex);
                _finalizedCall = false;
                return (false);
            }
        }
        monitor.CloseGroup("Success.");
        _finalizedCall = true;
        return (true);
    }
}
/// <summary>
/// Runs the builder: publishes the build projects that needs to be.
/// This is private: <see cref="EnsureZeroBuildProjects"/> calls it.
/// </summary>
/// <param name="m">The monitor to use.</param>
/// <param name="mustReloadSolutions">True if solutions must be reloaded.</param>
/// <returns>True on success, false on error.</returns>
bool Run(IActivityMonitor m, IBasicApplicationLifetime appLife, out bool mustReloadSolutions)
{
    Debug.Assert(_mustBuild.Count == ZeroBuildProjects.Count);
    ReadCurrentSha(m);
    Debug.Assert(ZeroBuildProjects.Select(p => _context.FindDriver(p.Project))
                 .All(d => d.GitRepository.CheckCleanCommit(m)), "Repositories are clean.");
    mustReloadSolutions = false;
    try
    {
        // Phase 1: decide which projects actually need a rebuild.
        // A project is skipped only when NO "ReasonToBuild" below applies.
        using (m.OpenTrace("Analysing dependencies."))
        {
            foreach (var p in ZeroBuildProjects)
            {
                using (m.OpenInfo($"{p} <= {(p.AllDependencies.Any() ? p.AllDependencies.Select( d => d.Name ).Concatenate() : "(no dependency)")}."))
                {
                    var driver = _context.FindDriver(p.Project);
                    // Check cache.
                    var currentTreeSha = _currentShas[p.Index];
                    if (currentTreeSha == null)
                    {
                        throw new Exception($"Unable to get Sha for {p}.");
                    }
                    if (!_sha1Cache.TryGetValue(p.Project.FullFolderPath, out var shaList))
                    {
                        m.Info($"ReasonToBuild#1: No cached Sha signature found for {p.Project.FullFolderPath}.");
                    }
                    else if (!shaList.Contains(currentTreeSha))
                    {
                        m.Info($"ReasonToBuild#2: Current Sha signature differs from the cached ones.");
                    }
                    else if (p.AllDependencies.Any(dep => _mustBuild.Contains(dep.FullFolderPath)))
                    {
                        m.Info($"ReasonToBuild#3: Rebuild dependencies are {_mustBuild.Intersect( p.AllDependencies.Select( dep => dep.FullFolderPath.Path ) ).Concatenate()}.");
                    }
                    // NOTE(review): "in in" typo in the message below (runtime string kept as-is).
                    else if (p.MustPack &&
                             !System.IO.File.Exists(
                                 System.IO.Path.Combine(
                                     _localFeedProvider.ZeroBuild.PhysicalPath,
                                     p.Project.SimpleProjectName + ".0.0.0-0.nupkg")))
                    {
                        m.Info($"ReasonToBuild#4: {p.Project.SimpleProjectName}.0.0.0-0 does not exist in in Zero build feed.");
                    }
                    else if (p.Project.IsBuildProject &&
                             !System.IO.File.Exists(_localFeedProvider.GetZeroVersionCodeCakeBuilderExecutablePath(p.Project.Solution.Name)))
                    {
                        m.Info($"ReasonToBuild#5: Published ZeroVersion CodeCakeBuilder is missing.");
                    }
                    else
                    {
                        // No reason to build: drop it from the work set.
                        _mustBuild.Remove(p.Project.FullFolderPath);
                        m.CloseGroup($"Project '{p}' is up to date. Build skipped.");
                    }
                }
                if (appLife.StopRequested(m))
                {
                    return (false);
                }
            }
        }
        if (_mustBuild.Count == 0)
        {
            m.Info("Nothing to build. Build projects are up-to-date.");
            mustReloadSolutions = false;
        }
        else
        {
            // Phase 2: build/publish the remaining projects in dependency order.
            mustReloadSolutions = true;
            using (m.OpenTrace($"Build/Publish {_mustBuild.Count} build projects: {_mustBuild.Concatenate()}"))
            {
                foreach (var p in ZeroBuildProjects.Where(p => _mustBuild.Contains(p.Project.FullFolderPath)))
                {
                    var action = p.MustPack ? "Publishing" : "Building";
                    using (m.OpenInfo($"{action} {p}."))
                    {
                        var driver = _context.FindDriver(p.Project);
                        if (!driver.ZeroBuildProject(m, p))
                        {
                            // A failed build invalidates this project's cached signatures.
                            _sha1Cache.Remove(p.Project.FullFolderPath);
                            m.CloseGroup("Failed.");
                            return (false);
                        }
                        _mustBuild.Remove(p.Project.FullFolderPath);
                        AddCurrentShaToCache(m, p);
                        m.CloseGroup("Success.");
                    }
                    if (appLife.StopRequested(m))
                    {
                        return (false);
                    }
                }
            }
        }
        return (true);
    }
    finally
    {
        // The cache is persisted whenever anything was (or started being) built.
        if (mustReloadSolutions)
        {
            SaveShaCache(m);
        }
        Debug.Assert(ZeroBuildProjects.Select(p => _context.FindDriver(p.Project))
                     .All(d => d.GitRepository.CheckCleanCommit(m)), "Repositories are clean.");
    }
}
/// <summary>
/// Computes the final <see cref="AutoServiceKind"/> of this IEnumerable of services from
/// its implementations: resolves the lifetime (Scoped wins over Singleton), the Front
/// aspect, and the marshallable type sets. Sets success to false on lifetime conflicts.
/// </summary>
bool DoComputeFinalTypeKind(IActivityMonitor m, IAutoServiceKindComputeFacade ctx, AutoServiceKind initial, ref bool success)
{
    Debug.Assert(_rawImpls != null);
    const AutoServiceKind FrontTypeMask = AutoServiceKind.IsFrontProcessService | AutoServiceKind.IsFrontService;
    bool isScoped = (initial & AutoServiceKind.IsScoped) != 0;
    HashSet<Type>?allMarshallableTypes = null;
    HashSet<Type>?frontMarshallableTypes = null;
    // If it is [IsMarshallable], the marshaller must handle the marhsalling of any implementations
    // (this is strange... but who knows?).
    bool isInterfaceMarshallable = (initial & AutoServiceKind.IsMarshallable) != 0;
    // If isInterfaceMarshallable is false (regular case), then for this IEnumerable to be marshallable, all its
    // implementations that are Front services must be marshallable so that it can be resolved as long as its
    // implementations have been marshalled.
    // Lets's be optimistic: all implementations that are Front(Process) services (if any) will be marshallable.
    bool isAutomaticallyMarshallable = true;
    using (m.OpenTrace($"Computing 'IEnumerable<{EnumeratedType.FullName}>'s final type from {_rawImpls.Count} implementations. Initial: '{initial}'."))
    {
        foreach (var info in _rawImpls)
        {
            // RealObject are singleton, are not mashallable and not front process.
            if (info is RealObjectClassInfo)
            {
                continue;
            }
            Debug.Assert(info.ServiceClass != null);
            var impl = info.ServiceClass.MostSpecialized;
            Debug.Assert(impl != null);
            // We provide a new empty "cycle detection context" to the class constructors: IEnumerable
            // of interfaces break potential cycles since they handle their own cycle by resolving to
            // the "worst" non marshallable IsFrontService|IsScoped.
            // We consider that if the IEnumerable (or one of its class) cannot be resolved by the DI container,
            // it's not our problem here.
            var k = impl.ComputeFinalTypeKind(m, ctx, new Stack<AutoServiceClassInfo>(), ref success);
            // Check for scope lifetime.
            if (!isScoped)
            {
                if ((k & AutoServiceKind.IsScoped) != 0)
                {
                    if ((initial & AutoServiceKind.IsSingleton) != 0)
                    {
                        // Explicit Singleton registration conflicts with a Scoped implementation.
                        m.Error($"Lifetime error: Type 'IEnumerable<{EnumeratedType.FullName}>' has been registered as a Singleton but implementation '{impl.ClassType}' is Scoped.");
                        success = false;
                    }
                    else
                    {
                        isScoped = true;
                        m.Info($"Type 'IEnumerable<{EnumeratedType.FullName}>' must be Scoped since the implementation '{impl.ClassType}' is Scoped.");
                    }
                }
            }
            // If the implementation is not a front service, we skip it (we don't care of a IsMarshallable only type).
            if ((k & (AutoServiceKind.IsFrontService | AutoServiceKind.IsFrontProcessService)) == 0)
            {
                continue;
            }
            var newFinal = _finalKind | (k & AutoServiceKind.IsFrontProcessService | AutoServiceKind.IsFrontService);
            if (newFinal != _finalKind)
            {
                // Upgrades from None, Process to Front...
                m.Trace($"Type 'IEnumerable<{EnumeratedType.FullName}>' must be {newFinal & FrontTypeMask}, because of (at least) '{impl.ClassType}' implementation.");
                _finalKind = newFinal;
            }
            // If the enumerated Service is marshallable at its level OR it is already known to be NOT automatically marshallable,
            // we don't have to worry anymore about the subsequent implementations marshalling.
            if (isInterfaceMarshallable || !isAutomaticallyMarshallable)
            {
                continue;
            }
            if ((k & AutoServiceKind.IsMarshallable) == 0)
            {
                // One non-marshallable Front implementation is enough to kill automatic marshallability.
                if (success)
                {
                    m.Warn($"Type 'IEnumerable<{EnumeratedType.FullName}>' is not marshallable and the implementation '{impl.ClassType}' that is a Front service is not marshallable: 'IEnumerable<{EnumeratedType.Name}>' cannot be considered as marshallable.");
                }
                isAutomaticallyMarshallable = false;
            }
            else
            {
                // Accumulate the marshallable types (lazily allocating the sets).
                if (allMarshallableTypes == null)
                {
                    allMarshallableTypes = new HashSet<Type>();
                }
                Debug.Assert(impl.MarshallableTypes != null, "EnsureCtorBinding has been called.");
                allMarshallableTypes.AddRange(impl.MarshallableTypes);
                if ((k & AutoServiceKind.IsFrontService) != 0)
                {
                    if (frontMarshallableTypes == null)
                    {
                        frontMarshallableTypes = new HashSet<Type>();
                    }
                    Debug.Assert(impl.MarshallableInProcessTypes != null, "EnsureCtorBinding has been called.");
                    frontMarshallableTypes.AddRange(impl.MarshallableInProcessTypes);
                }
            }
        }
        // Conclude about lifetime.
        if (!isScoped)
        {
            if (success && (initial & AutoServiceKind.IsSingleton) == 0)
            {
                m.Info($"Nothing prevents 'IEnumerable<{EnumeratedType.FullName}>' to be considered as a Singleton: this is the most efficient choice.");
            }
            _finalKind |= AutoServiceKind.IsSingleton;
        }
        else
        {
            _finalKind |= AutoServiceKind.IsScoped;
        }
        // Conclude about Front aspect.
        if (isInterfaceMarshallable)
        {
            MarshallableTypes = MarshallableInProcessTypes = new[] { EnumeratedType };
            Debug.Assert((_finalKind & AutoServiceKind.IsMarshallable) == 0);
        }
        else
        {
            if (isAutomaticallyMarshallable && allMarshallableTypes != null)
            {
                Debug.Assert(allMarshallableTypes.Count > 0);
                MarshallableTypes = allMarshallableTypes;
                _finalKind |= AutoServiceKind.IsMarshallable;
                if (frontMarshallableTypes != null)
                {
                    MarshallableInProcessTypes = frontMarshallableTypes;
                }
                else
                {
                    MarshallableInProcessTypes = Type.EmptyTypes;
                }
            }
            else
            {
                // This service is not a Front service OR it is not automatically marshallable.
                // We have nothing special to do: the set of Marshallable types is empty (this is not an error).
                MarshallableTypes = MarshallableInProcessTypes = Type.EmptyTypes;
                Debug.Assert((_finalKind & AutoServiceKind.IsMarshallable) == 0);
            }
        }
        if (_finalKind != initial)
        {
            m.CloseGroup($"Final: {_finalKind}");
        }
    }
    return (success);
}
/// <summary>
/// Scans a directory for *.ckmon log files, groups their entries per originating monitor,
/// and replays each monitor's activity into a monitor obtained from <paramref name="monitorProvider"/>.
/// </summary>
/// <param name="directory">Root directory that contains the *.ckmon files.</param>
/// <param name="recurse">True to also scan sub-directories.</param>
/// <param name="monitorProvider">Maps each discovered monitor to the monitor that will replay it; returning null skips that activity.</param>
/// <param name="m">Optional monitor that traces the replay process itself.</param>
public static void ReplayLogs( DirectoryInfo directory, bool recurse, Func<MultiLogReader.Monitor, ActivityMonitor> monitorProvider, IActivityMonitor m = null )
{
    var logReader = new MultiLogReader();
    using( m != null ? m.OpenTrace().Send( "Reading files from '{0}' {1}.", directory.FullName, recurse ? "(recursive)" : null ) : null )
    {
        SearchOption scanDepth = recurse ? SearchOption.AllDirectories : SearchOption.TopDirectoryOnly;
        var ckmonFiles = logReader.Add( directory.EnumerateFiles( "*.ckmon", scanDepth ).Select( f => f.FullName ) );
        if( ckmonFiles.Count == 0 )
        {
            if( m != null ) m.Warn().Send( "No *.ckmon files found!" );
            return;
        }
        var monitorEntries = logReader.GetActivityMap().Monitors;
        if( m != null )
        {
            m.Trace().Send( String.Join( Environment.NewLine, ckmonFiles ) );
            m.CloseGroup( String.Format( "Found {0} file(s) containing {1} monitor(s).", ckmonFiles.Count, monitorEntries.Count ) );
            m.OpenTrace().Send( "Extracting entries." );
        }
        foreach( var entry in monitorEntries )
        {
            ActivityMonitor target = monitorProvider( entry );
            if( target != null ) entry.Replay( target, m );
            else if( m != null ) m.Info().Send( "Skipping activity from '{0}'.", entry.MonitorId );
        }
    }
}
/// <summary>
/// Closes the current group with an optional, polymorphic conclusion. The parameter may be a string,
/// an <see cref="ActivityLogGroupConclusion"/>, a <see cref="List{T}"/> or an <see cref="IEnumerable{T}"/>
/// of ActivityLogGroupConclusion, or any object whose overridden <see cref="Object.ToString"/> is meaningful.
/// See remarks (especially for List&lt;ActivityLogGroupConclusion&gt;).
/// </summary>
/// <param name="this">This <see cref="IActivityMonitor"/>.</param>
/// <param name="userConclusion">Optional conclusion object (see summary and remarks).</param>
/// <remarks>
/// An untyped object keeps this overload cheap and flexible for both strings and pre-built ActivityLogGroupConclusion instances.
/// When a List&lt;ActivityLogGroupConclusion&gt; is provided, it is used directly as the conclusion collector
/// (new conclusions are appended to it): this is an optimization.
/// </remarks>
public static void CloseGroup(this IActivityMonitor @this, object userConclusion = null) => @this.CloseGroup(NextLogTime(@this), userConclusion);
/// <summary>
/// Runs the engine's static resolution four times (all combinations of service/plugin ordering reversal),
/// compares the four results for consistency, then executes the <paramref name="tests"/> predicate on each of them.
/// Fails the current test (via <see cref="Assert.Fail(string)"/>) when the orderings lead to different results.
/// </summary>
/// <param name="this">The engine on which static resolution is performed.</param>
/// <param name="tests">Assertions to apply to each of the four resolution results.</param>
/// <param name="callerName">Automatically set to the calling test method's name.</param>
public static void FullStaticResolutionOnly(this YodiiEngine @this, Action <IYodiiEngineStaticOnlyResult> tests, [CallerMemberName] string callerName = null)
{
    IActivityMonitor m = TestHelper.ConsoleMonitor;
    // result[0] = default, result[1] = revertServices, result[2] = revertPlugins, result[3] = both.
    IYodiiEngineStaticOnlyResult[] result = new IYodiiEngineStaticOnlyResult[4];
    using (m.OpenInfo().Send("FullStaticResolutionOnly for {0}.", callerName))
    {
        using (m.OpenInfo().Send("StaticResolutionOnly()."))
        {
            result[0] = @this.StaticResolutionOnly(false, false);
            result[0].Trace(m);
        }
        using (m.OpenInfo().Send("StaticResolutionOnly( revertServices )."))
        {
            result[1] = @this.StaticResolutionOnly(true, false);
            result[1].Trace(m);
        }
        using (m.OpenInfo().Send("StaticResolutionOnly( revertPlugins )."))
        {
            result[2] = @this.StaticResolutionOnly(false, true);
            result[2].Trace(m);
        }
        using (m.OpenInfo().Send("StaticResolutionOnly( revertServices, revertPlugins )."))
        {
            result[3] = @this.StaticResolutionOnly(true, true);
            result[3].Trace(m);
        }
        // CatchCounter collects errors/warnings emitted while comparing, so the final
        // conclusion can be decided after the comparison group is closed.
        int comparingErrorCount = 0;
        int comparingWarningCount = 0;
        using (m.CatchCounter((f, e, w) => { comparingErrorCount = f + e; comparingWarningCount = w; }))
        using (m.OpenInfo().Send("Comparing results."))
        {
            if (result[0].Success)
            {
                // Default resolution succeeded: every other ordering must succeed too.
                if (!result[1].Success)
                {
                    m.Error().Send("revertServices has failed.");
                }
                if (!result[2].Success)
                {
                    m.Error().Send("revertPlugins has failed.");
                }
                if (!result[3].Success)
                {
                    m.Error().Send("revertPlugins & revertServices has failed.");
                }
            }
            else
            {
                // Default resolution failed: the other orderings must fail on the same blocking items.
                var refItems = String.Join(", ", result[0].StaticFailureResult.BlockingItems.Select(i => i.FullName).OrderBy(Util.FuncIdentity));
                if (result[1].Success)
                {
                    m.Error().Send("revertServices succeeded.");
                }
                else
                {
                    var items = String.Join(", ", result[1].StaticFailureResult.BlockingItems.Select(i => i.FullName).OrderBy(Util.FuncIdentity));
                    if (items != refItems)
                    {
                        m.Warn().Send("revertServices found blocking items: '{1}' where default found: {0}.", refItems, items);
                    }
                }
                if (result[2].Success)
                {
                    m.Error().Send("revertPlugins succeeded.");
                }
                else
                {
                    var items = String.Join(", ", result[2].StaticFailureResult.BlockingItems.Select(i => i.FullName).OrderBy(Util.FuncIdentity));
                    if (items != refItems)
                    {
                        // Fixed copy-paste bug: this branch compares the revertPlugins result (result[2])
                        // but previously labelled the warning "revertServices".
                        m.Warn().Send("revertPlugins found blocking items: '{1}' where default found: {0}.", refItems, items);
                    }
                }
                if (result[3].Success)
                {
                    m.Error().Send("revertPlugins & revertServices succeeded.");
                }
                else
                {
                    var items = String.Join(", ", result[3].StaticFailureResult.BlockingItems.Select(i => i.FullName).OrderBy(Util.FuncIdentity));
                    if (items != refItems)
                    {
                        m.Warn().Send("revertPlugins & revertServices found blocking items: '{1}' where default found: {0}.", refItems, items);
                    }
                }
            }
        }
        using (m.OpenInfo().Send("Executing tests predicates."))
        {
            tests(result[0]);
            tests(result[1]);
            tests(result[2]);
            tests(result[3]);
        }
        if (comparingErrorCount == 0)
        {
            if (comparingWarningCount == 0)
            {
                m.CloseGroup("No difference between plugin/service ordering.");
            }
            else
            {
                m.CloseGroup("Plugin/service ordering leads to different blocking detection. See logs for details.");
            }
        }
        else
        {
            Assert.Fail("Plugin/service ordering leads to different result! (See logs for details.)");
        }
    }
}
/// <summary>
/// Dumps the result of the compilation into a monitor.
/// </summary>
/// <param name="monitor">The monitor to use. Must not be null.</param>
/// <param name="dumpSources">Optionally dumps the source as another <see cref="CK.Core.LogLevel"/>.</param>
public void LogResult(IActivityMonitor monitor, LogLevel? dumpSources = null)
{
    if (monitor == null)
    {
        throw new ArgumentNullException(nameof(monitor));
    }
    using (monitor.OpenInfo("Code Generation information."))
    {
        // Assembly load conflicts: resolved ones are warnings, unresolved ones are errors.
        if (LoadConflicts != null && LoadConflicts.Count > 0)
        {
            using (monitor.OpenWarn($"{LoadConflicts.Count} assembly load conflict(s)."))
            {
                foreach (var e in LoadConflicts)
                {
                    if (e.Resolved != null)
                    {
                        monitor.Warn(e.ToString());
                    }
                    else
                    {
                        monitor.Error(e.ToString());
                    }
                }
            }
        }
        if (Success)
        {
            monitor.Info(CompilationSkipped ? "Source code parsing succeeded." : "Source code compilation succeeded.");
            if (dumpSources.HasValue)
            {
                DumpSources(monitor, dumpSources.Value);
            }
        }
        else
        {
            using (monitor.OpenError(CompilationSkipped ? "Parsing failed." : "Compilation failed."))
            {
                if (EmitError != null)
                {
                    monitor.Error(EmitError);
                }
                if (EmitResult != null)
                {
                    // Compilation was attempted: dump its diagnostics.
                    if (!EmitResult.Success)
                    {
                        using (monitor.OpenInfo($"{EmitResult.Diagnostics.Count()} Compilation diagnostics."))
                        {
                            foreach (var diag in EmitResult.Diagnostics)
                            {
                                monitor.Trace(diag.ToString());
                            }
                        }
                    }
                }
                else
                {
                    // No EmitResult means only parsing ran: dump the parse diagnostics instead.
                    Debug.Assert(CompilationSkipped);
                    using (monitor.OpenInfo($"{ParseDiagnostics.Count()} Parsing diagnostics."))
                    {
                        foreach (var diag in ParseDiagnostics)
                        {
                            monitor.Trace(diag.ToString());
                        }
                    }
                }
                if (dumpSources.HasValue)
                {
                    DumpSources(monitor, dumpSources.Value);
                }
            }
        }
        if (AssemblyLoadError != null)
        {
            monitor.Error("Generated assembly load failed.", AssemblyLoadError);
        }
        // Fixed typo in the conclusion message: "successfuly" -> "successfully".
        monitor.CloseGroup(Assembly != null ? "Generated assembly successfully loaded." : (Success ? "Succeeded." : "Failed."));
    }
}
/// <summary>
/// Resolves this family of auto service interfaces to a single implementation class.
/// The trivial single-class case resolves immediately; otherwise exactly one "head" class
/// (one that covers the whole set of other candidate implementations) must exist.
/// </summary>
/// <param name="m">Monitor that receives resolution traces and errors.</param>
/// <param name="_">Final registrar, currently unused here (see the "dispatcher" note below).</param>
/// <returns>True on success, false when resolution failed (errors have been logged).</returns>
public bool Resolve(IActivityMonitor m, FinalRegistrar _)
{
    bool success = true;
    Debug.Assert(Classes.Count > 0 && Interfaces.Count > 0);
    if (Classes.Count == 1)
    {
        // Trivial case: a single implementation wins without any analysis.
        Resolved = Classes.Single().Class;
    }
    else
    {
        using (m.OpenInfo($"Service resolution required for {ToString()}."))
        {
            // Deliberate assignment (not comparison) inside the condition:
            // success tracks InitializeClasses' outcome.
            if (success = InitializeClasses(m))
            {
                var headCandidates = Classes.Where(c => c.IsHeadCandidate).ToList();
                var heads = headCandidates.Where(c => c.IsHead).ToList();
                if (headCandidates.Count == 0)
                {
                    m.Error($"No possible implementation found. A class that implements '{BaseInterfacesToString()}' interfaces is required.");
                    success = false;
                }
                else if (heads.Count == 0)
                {
                    // Candidates exist but none covers all the others: ambiguous resolution.
                    m.Error($"Among '{headCandidates.Select( c => c.ToString() ).Concatenate( "', '" )}' possible implementations, none covers the whole set of other implementations. Use [ReplaceAutoService(...)] attribute to disambiguate.");
                    // Best-effort extra diagnostic: a StObjConstruct parameter typed as another
                    // candidate may look like it drives resolution, but it does not.
                    var couldUseStObjConstruct = headCandidates.Select(c => c.Class.TypeInfo)
                                                 .OfType <RealObjectClassInfo>()
                                                 .Where(c => c.ConstructParameters != null
                                                             && c.ConstructParameters
                                                             .Any(p => headCandidates.Select(x => x.Class.ClassType).Any(o => p.ParameterType.IsAssignableFrom(o))))
                                                 .Select(c => $"{c.Type.FullName}.StObjConstruct( {c.ConstructParameters.Select( p => p.ParameterType.Name ).Concatenate() } )")
                                                 .FirstOrDefault();
                    if (couldUseStObjConstruct != null)
                    {
                        m.Error($"Please note that RealObject.StObjConstruct parameters are irrelevant to Service resolution: for instance {couldUseStObjConstruct} is ignored. Use [ReplaceAutoService(...)] attribute.");
                    }
                    success = false;
                }
                else if (heads.Count > 1)
                {
                    m.Error($"Multiple possible implementations found: '{heads.Select( c => c.ToString() ).Concatenate( "', '" )}'. They must be unified.");
                    success = false;
                }
                else
                {
                    // Here comes the "dispatcher" handling and finalRegistrar must
                    // register all BuildClassInfo required by special handling of
                    // handled parameters (IReadOnlyCollection<IService>...).
                    var r = heads[0].Class;
                    Resolved = r;
                    m.CloseGroup($"Resolved to '{r}'.");
                }
            }
        }
    }
    if (success)
    {
        // Propagate the winning class to every interface of the family.
        foreach (var i in _interfaces)
        {
            i.FinalResolved = Resolved;
        }
    }
    return(success);
}
// Demonstrates manual group management on an IActivityMonitor: an OpenInfo group
// explicitly concluded by CloseGroup. As the method name says, this is far from
// perfect: OpenInfo() is not wrapped in a 'using' block, so if an exception is
// thrown between the two calls, CloseGroup is never reached here.
void DemoOpenGroupFarFromPerfect( IActivityMonitor m )
{
    m.OpenInfo().Send( "Doing things..." );
    // ...
    m.CloseGroup( "Success." );
}