/// <summary>
/// Writes this error (code + message via <c>ErrorFormat</c>) to the error stream,
/// then recursively writes every nested error found in <c>Details</c>.
/// </summary>
/// <param name="error">The error whose tree of detail errors is displayed.</param>
private void DisplayInnerDetailErrorMessage(ExtendedErrorInfo error)
{
    WriteError(string.Format(ErrorFormat, error.Code, error.Message));

    // Leaf error: nothing further to display.
    if (error.Details == null)
    {
        return;
    }

    foreach (var detail in error.Details)
    {
        DisplayInnerDetailErrorMessage(detail);
    }
}
/// <summary>
/// Maps an <see cref="ExtendedErrorInfo"/> (and, recursively, its nested
/// <c>Details</c>) to a <see cref="PSResourceManagerError"/> for PowerShell output.
/// </summary>
/// <param name="error">The service-side error to convert.</param>
/// <returns>The converted error; <c>Target</c> is null when the source target is null or empty,
/// and <c>Details</c> is populated only when the source has detail errors.</returns>
public static PSResourceManagerError ToPSResourceManagerError(this ExtendedErrorInfo error)
{
    var result = new PSResourceManagerError
    {
        Code = error.Code,
        Message = error.Message,
        // Normalize an empty target string to null so it is omitted from output.
        Target = string.IsNullOrEmpty(error.Target) ? null : error.Target
    };

    if (error.Details != null)
    {
        var convertedDetails = new List<PSResourceManagerError>();
        foreach (var detail in error.Details)
        {
            convertedDetails.Add(detail.ToPSResourceManagerError());
        }

        result.Details = convertedDetails;
    }

    return result;
}
/// <summary>
/// For each copy-status result that carries serialized error bytes, deserializes them
/// into <c>ExtendedErrorInfo</c>. Deserialization failures are traced and swallowed so
/// one bad payload does not abort processing of the remaining statuses.
/// </summary>
/// <param name="statusResults">Copy-status entries to hydrate; may be null.</param>
internal static void DeserializeExtendedErrorInfo(RpcDatabaseCopyStatus2[] statusResults)
{
    if (statusResults == null)
    {
        return;
    }

    foreach (RpcDatabaseCopyStatus2 status in statusResults)
    {
        byte[] payload = status.ExtendedErrorInfoBytes;

        // Skip entries with no serialized error payload.
        if (payload == null || payload.Length == 0)
        {
            continue;
        }

        try
        {
            status.ExtendedErrorInfo = ExtendedErrorInfo.Deserialize(payload);
        }
        catch (SerializationException ex)
        {
            ExTraceGlobals.ReplayServiceRpcTracer.TraceError<Guid, SerializationException>(0L, "GetCopyStatus(): Failed to deserialize ExtendedErrorInfo for database '{0}'. Exception: {1}", status.DBGuid, ex);
        }
        catch (TargetInvocationException ex2)
        {
            ExTraceGlobals.ReplayServiceRpcTracer.TraceError<Guid, TargetInvocationException>(0L, "GetCopyStatus(): Failed to deserialize ExtendedErrorInfo for database '{0}'. Exception: {1}", status.DBGuid, ex2);
        }
    }
}
/// <summary>
/// Marks a database seed as failed: clears the Seeding flag, updates perfmon
/// counters, applies suspended state if needed, records and persists the failure,
/// and refreshes the externally visible status. All mutation happens under the
/// instance lock.
/// </summary>
/// <param name="errorEventTuple">Event-log tuple describing the failure.</param>
/// <param name="errorMessage">Localized failure message.</param>
/// <param name="errorInfo">Extended error detail to record alongside the failure.</param>
public void FailedDbSeed(ExEventLog.EventTuple errorEventTuple, LocalizedString errorMessage, ExtendedErrorInfo errorInfo)
{
    lock (this.m_instance)
    {
        this.LogCrimsonEventOnStateChange<bool>("Seeding", this.Seeding, false);
        this.Seeding = false;

        var counters = this.m_perfmonCounters;
        if (counters != null)
        {
            counters.Failed = 1L;
            counters.Disconnected = 0L;
            if (this.Suspended)
            {
                counters.FailedSuspended = 1L;
            }
        }

        if (this.Suspended)
        {
            this.SetSuspendedInternal();
        }

        this.FailureInfo.SetBroken(errorEventTuple, errorMessage, errorInfo);
        this.FailureInfo.PersistFailure(this.m_replayState);
        ExTraceGlobals.ReplicaInstanceTracer.TraceDebug<string, LocalizedString>((long)this.GetHashCode(), "{0}: Seeding failed with error: {1}", this.m_displayName, errorMessage);
        this.ExternalStatus.Refresh();
    }
}
/// <summary>
/// Transitions the copy to the Failed-and-Suspended state under the instance lock:
/// sets the relevant perfmon counters, applies the suspended state, records the
/// failure, and refreshes the externally visible status.
/// </summary>
/// <param name="failureEventId">Event id to associate with the recorded failure.</param>
/// <param name="errorMessage">Localized failure message.</param>
/// <param name="errorInfo">Extended error detail to record alongside the failure.</param>
public void SetFailedAndSuspended(uint failureEventId, LocalizedString errorMessage, ExtendedErrorInfo errorInfo)
{
    lock (this.m_instance)
    {
        var counters = this.m_perfmonCounters;
        if (counters != null)
        {
            counters.Failed = 1L;
            counters.FailedSuspended = 1L;
            counters.Disconnected = 0L;
        }

        this.SetSuspendedInternal();
        this.FailureInfo.SetBroken(new uint?(failureEventId), errorMessage, errorInfo);
        this.ExternalStatus.Refresh();
    }
}
/// <summary>
/// Core "set broken" path for a database copy: formats and logs the failure event,
/// records the failure state under the instance lock, updates perfmon counters,
/// cancels any passive seeding sourced from this copy, optionally publishes a
/// failure item, and (on the first transition to broken) persists the failure and
/// prepares the instance to stop.
/// </summary>
/// <param name="failureTag">Failure classification used for failure-item publishing.</param>
/// <param name="setBrokenEventTuple">Event-log tuple for the failure event.</param>
/// <param name="extendedErrorInfo">Extended error detail; its FailureException (if any) supplies the HResult logged.</param>
/// <param name="setBrokenArgsPlusDb">Format arguments for the event message.</param>
private void SetBrokenInternal(FailureTag failureTag, ExEventLog.EventTuple setBrokenEventTuple, ExtendedErrorInfo extendedErrorInfo, params string[] setBrokenArgsPlusDb)
{
    int num;
    // Render the event message once; it is reused for crimson logging, FailureInfo, and tracing.
    string text = setBrokenEventTuple.EventLogToString(out num, setBrokenArgsPlusDb);
    Exception failureException = extendedErrorInfo.FailureException;
    int num2 = 0;
    if (failureException != null)
    {
        num2 = failureException.HResult;
    }
    // Periodic (suppressed) crimson event including the current stack trace and HResult.
    ReplayCrimsonEvents.SetBroken.LogPeriodic<Guid, string, string, string, Exception, int>(this.m_databaseName, DiagCore.DefaultEventSuppressionInterval, this.m_guid, this.m_databaseName, text, Environment.StackTrace, failureException, num2);
    // flag captures whether the copy was ALREADY broken before this call; first-time
    // transitions additionally persist the failure and stop the instance (see below).
    bool flag = false;
    lock (this.m_instance)
    {
        flag = this.IsBroken;
        this.FailureInfo.SetBroken(setBrokenEventTuple, new LocalizedString(text), extendedErrorInfo);
        if (this.m_perfmonCounters != null)
        {
            this.m_perfmonCounters.Failed = 1L;
            this.m_perfmonCounters.Disconnected = 0L;
            if (this.Suspended)
            {
                this.m_perfmonCounters.FailedSuspended = 1L;
            }
        }
        // flag3 is set by LogEvent; based on usage below it appears to indicate the
        // event was suppressed/deduplicated rather than freshly written — TODO confirm
        // against ExEventLog.EventTuple.LogEvent semantics.
        bool flag3;
        setBrokenEventTuple.LogEvent(this.m_identity, out flag3, setBrokenArgsPlusDb);
        ExTraceGlobals.ReplicaInstanceTracer.TraceDebug<string, FailureTag, string>((long)this.GetHashCode(), "{0} SetBroken with tag {1} because {2}", this.m_displayName, failureTag, text);
        // If this copy is acting as a passive seeding source, cancel that seed now
        // that the copy has failed.
        MonitoredDatabase monitoredDatabase = MonitoredDatabase.FindMonitoredDatabase(this.ReplicaInstance.Configuration.ServerName, this.m_guid);
        if (monitoredDatabase != null && this.PassiveSeedingSourceContext != PassiveSeedingSourceContextEnum.None)
        {
            ExTraceGlobals.ReplicaInstanceTracer.TraceDebug<Guid>((long)this.GetHashCode(), "Cancel seeding for mdb {0}", this.m_guid);
            SourceSeedTable.Instance.CancelSeedingIfAppropriate(SourceSeedTable.CancelReason.CopyFailed, monitoredDatabase.DatabaseGuid);
        }
        // flag4 == true suppresses failure-item publishing. NOTE(review): the exact
        // interaction of flag3 with DisableSetBrokenFailureItemSuppression and the
        // suppressable/non-suppressable tag checks is opaque in this decompiled form —
        // verify intent before changing.
        bool flag4 = false;
        if (flag3 && (!RegistryParameters.DisableSetBrokenFailureItemSuppression || this.IsSuppressableFailureTag(failureTag)) && !this.IsNonSuppressableFailureTag(failureTag))
        {
            flag4 = true;
        }
        if (!flag4 && failureTag != FailureTag.NoOp)
        {
            FailureItemPublisherHelper.PublishAction(failureTag, this.m_guid, this.m_databaseName);
        }
        // Only persist on the first transition to broken.
        if (!flag)
        {
            this.FailureInfo.PersistFailure(this.m_replayState);
        }
        this.ExternalStatus.Refresh();
    }
    // Outside the lock: on first transition to broken, begin shutting the instance down.
    if (!flag)
    {
        this.m_instance.PrepareToStop();
    }
}
// Token: 0x060018F8 RID: 6392 RVA: 0x00066668 File Offset: 0x00064868
/// <summary>
/// Entry point of the AutoReseed workflow launcher for one database copy. Validates
/// the copy status, runs the never-mounted-active and healthy-copy workflows, then
/// dispatches at most one recovery workflow based on the copy state:
/// Failed (non-timeout) -> FailedCopyWorkflow; FailedAndSuspended ->
/// FailedSuspendedCopyAutoReseedWorkflow; otherwise, an all-catalogs-failed check
/// may launch FailedSuspendedCatalogRebuildWorkflow, falling back to
/// TryLaunchCatalogAutoReseedWorkflow. Each launch is gated by the suppression
/// tracker (m_suppression) to avoid repeated launches.
/// </summary>
/// <param name="context">AutoReseed context carrying the database, target server, and cached copy statuses.</param>
public void BeginAutoReseedIfNecessary(AutoReseedContext context)
{
    Guid guid = context.Database.Guid;
    string name = context.Database.Name;
    // Guard: no status at all for the target copy — nothing can be decided.
    if (context.TargetCopyStatus == null)
    {
        AutoReseedWorkflowLauncher.Tracer.TraceError<string, AmServerName>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' has null TargetCopyStatus. Skipping.", name, context.TargetServerName);
        return;
    }
    // Guard: the GetCopyStatus RPC itself failed — the cached status is not trustworthy.
    if (context.TargetCopyStatus.Result != CopyStatusRpcResult.Success)
    {
        AutoReseedWorkflowLauncher.Tracer.TraceError((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Skipping since GetCopyStatus RPC to database copy '{0}\\{1}' failed. Result: {2}. Error: {3}.", new object[] { name, context.TargetServerName, context.TargetCopyStatus.Result, context.TargetCopyStatus.LastException });
        return;
    }
    // These two workflows run unconditionally before any failure-driven dispatch.
    this.RunNeverMountedActiveWorkflow(context);
    this.RunHealthyCopyWorkflowIfNecessary(context);
    ExtendedErrorInfo extendedErrorInfo = context.TargetCopyStatus.CopyStatus.ExtendedErrorInfo;
    // Failed copies are handled here UNLESS the failure is a copy-status RPC timeout,
    // in which case the copy falls through to the FailedAndSuspended/catalog branch.
    if (context.TargetCopyStatus.CopyStatus.CopyStatus == CopyStatusEnum.Failed && (extendedErrorInfo == null || extendedErrorInfo.FailureException == null || !(extendedErrorInfo.FailureException is ReplayServiceRpcCopyStatusTimeoutException)))
    {
        // Never reseed the active copy.
        if (context.TargetCopyStatus.IsActive)
        {
            AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, AmServerName>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' is 'Failed' but active. Skipping.", name, context.TargetServerName);
            return;
        }
        // Suppression gate: only launch once the copy has been Failed long enough.
        if (this.m_suppression.ReportWorkflowLaunchConditionMet(AutoReseedWorkflowType.FailedCopy, guid, CatalogAutoReseedWorkflow.CatalogAutoReseedReason.None, 1))
        {
            AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, AmServerName, TimeSpan>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' has been 'Failed' for at least {2}. Launching the FailedCopyWorkflow workflow.", name, context.TargetServerName, AutoReseedWorkflowSuppression.s_dbFailedSuppresionInterval);
            FailedCopyWorkflow failedCopyWorkflow = new FailedCopyWorkflow(context, context.TargetCopyStatus.CopyStatus.ErrorMessage);
            failedCopyWorkflow.Execute();
            return;
        }
        // NOTE(review): both suppression intervals in this message use
        // s_dbFailedSuppresionInterval; the FailedAndSuspended branch below uses a
        // distinct retry interval for the second placeholder — possibly intentional,
        // possibly a decompile-era copy/paste; confirm before changing.
        AutoReseedWorkflowLauncher.Tracer.TraceDebug((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' is 'Failed' but launching the recovery workflow is being skipped due to either initial suppression of {2}, or periodic suppression of {3}.", new object[] { name, context.TargetServerName, AutoReseedWorkflowSuppression.s_dbFailedSuppresionInterval, AutoReseedWorkflowSuppression.s_dbFailedSuppresionInterval });
        return;
    }
    else
    {
        // Copy is not Failed (or Failed only due to a status timeout).
        if (context.TargetCopyStatus.CopyStatus.CopyStatus != CopyStatusEnum.FailedAndSuspended)
        {
            // Catalog-only failure handling: if this copy's content index is
            // FailedAndSuspended AND no copy of the database has a usable catalog
            // (every reachable copy is Disabled/Suspended/FailedAndSuspended),
            // rebuild the catalog from scratch.
            if (context.TargetCopyStatus.CopyStatus.ContentIndexStatus == ContentIndexStatusType.FailedAndSuspended)
            {
                if (context.CopyStatusesForTargetDatabase.All((CopyStatusClientCachedEntry status) => status.Result == CopyStatusRpcResult.Success && (status.CopyStatus.ContentIndexStatus == ContentIndexStatusType.Disabled || status.CopyStatus.ContentIndexStatus == ContentIndexStatusType.Suspended || status.CopyStatus.ContentIndexStatus == ContentIndexStatusType.FailedAndSuspended)))
                {
                    AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, Guid, string>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: AutoReseed workflow launcher detected all catalogs failed for database '{0}' [{1}]: {2}.", context.Database.Name, context.Database.Guid, context.TargetCopyStatus.CopyStatus.ErrorMessage);
                    ReplayCrimsonEvents.AutoReseedWorkflowAllCatalogFailed.Log<string, Guid, string, string>(context.Database.Name, context.Database.Guid, "FailedSuspendedCatalogRebuildWorkflow", context.TargetCopyStatus.CopyStatus.ContentIndexErrorMessage);
                    if (this.m_suppression.ReportWorkflowLaunchConditionMet(AutoReseedWorkflowType.FailedSuspendedCatalogRebuild, guid, CatalogAutoReseedWorkflow.CatalogAutoReseedReason.None, 1))
                    {
                        AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, TimeSpan>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database '{0}' has no catalogs in healthy state for at least {1}. Launching the recovery workflow.", name, AutoReseedWorkflowSuppression.s_ciRebuildSuppresionInterval);
                        FailedSuspendedCatalogRebuildWorkflow failedSuspendedCatalogRebuildWorkflow = new FailedSuspendedCatalogRebuildWorkflow(context, context.TargetCopyStatus.CopyStatus.ContentIndexErrorMessage);
                        failedSuspendedCatalogRebuildWorkflow.Execute();
                        return;
                    }
                    AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, TimeSpan, TimeSpan>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database '{0}' has no catalogs in healthy state but launching the recovery workflow is being skipped due to either initial suppression of {1}, or periodic suppression of {2}.", name, AutoReseedWorkflowSuppression.s_ciRebuildSuppresionInterval, AutoReseedWorkflowSuppression.s_ciRebuildRetryInterval);
                    return;
                }
                // Some other copy still has a healthy catalog: fall through to the
                // per-copy catalog reseed attempt below.
            }
            if (!this.TryLaunchCatalogAutoReseedWorkflow(context, name))
            {
                this.m_suppression.ReportNoWorkflowsNeedToLaunch(guid);
            }
            return;
        }
        // FailedAndSuspended branch: never reseed the active copy.
        if (context.TargetCopyStatus.IsActive)
        {
            AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, AmServerName>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' is 'FailedAndSuspended' but active. Skipping.", name, context.TargetServerName);
            return;
        }
        // Suppression gate for the full copy reseed workflow.
        if (this.m_suppression.ReportWorkflowLaunchConditionMet(AutoReseedWorkflowType.FailedSuspendedCopyAutoReseed, guid, CatalogAutoReseedWorkflow.CatalogAutoReseedReason.None, 1))
        {
            AutoReseedWorkflowLauncher.Tracer.TraceDebug<string, AmServerName, TimeSpan>((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' has been FailedAndSuspended for at least {2}. Launching the recovery workflow.", name, context.TargetServerName, AutoReseedWorkflowSuppression.s_dbReseedSuppresionInterval);
            FailedSuspendedCopyAutoReseedWorkflow failedSuspendedCopyAutoReseedWorkflow = new FailedSuspendedCopyAutoReseedWorkflow(context, context.TargetCopyStatus.CopyStatus.ErrorMessage);
            failedSuspendedCopyAutoReseedWorkflow.Execute();
            return;
        }
        AutoReseedWorkflowLauncher.Tracer.TraceDebug((long)this.GetHashCode(), "AutoReseedWorkflowLauncher.BeginAutoReseedIfNecessary: Database copy '{0}\\{1}' is FailedAndSuspended but launching the recovery workflow is being skipped due to either initial suppression of {2}, or periodic suppression of {3}.", new object[] { name, context.TargetServerName, AutoReseedWorkflowSuppression.s_dbReseedSuppresionInterval, AutoReseedWorkflowSuppression.s_dbReseedRetryInterval });
        return;
    }
}
/// <summary>
/// Wraps an <see cref="ExtendedErrorInfo"/> payload in an <c>Error</c> instance.
/// </summary>
/// <param name="errorValue">The error payload to store in <c>ErrorValue</c>.</param>
internal Error(ExtendedErrorInfo errorValue) { ErrorValue = errorValue; }
// Token: 0x06000D0C RID: 3340 RVA: 0x00039794 File Offset: 0x00037994
/// <summary>
/// Atomically records a failure state snapshot (flags, event id, message, extended
/// error info). All four fields are written under one lock so readers never see a
/// partially updated combination.
/// </summary>
/// <param name="failureState">New failure flags.</param>
/// <param name="errorEventId">Event-viewer event id, or null when none applies.</param>
/// <param name="errorMessage">Localized failure message.</param>
/// <param name="extendedErrorInfo">Extended error detail, or null.</param>
// NOTE(review): lock(this) lets external code contend on this object's monitor;
// a private readonly lock object would be safer, but changing it requires a new
// field and coordination with any external lockers — confirm before refactoring.
private void SetState(FailureInfo.FailureFlags failureState, uint?errorEventId, LocalizedString errorMessage, ExtendedErrorInfo extendedErrorInfo) { lock (this) { this.m_brokenFlags = failureState; this.m_errorEventId = errorEventId; this.m_errorMessage = errorMessage; this.m_extendedErrorInfo = extendedErrorInfo; } }
// Token: 0x06000D08 RID: 3336 RVA: 0x000396CE File Offset: 0x000378CE
/// <summary>
/// Marks the failure info as Failed using an event-log tuple; the tuple is
/// translated to its event-viewer id via <c>DiagCore.GetEventViewerEventId</c>
/// before delegating to <c>SetState</c>.
/// </summary>
/// <param name="eventTuple">Event-log tuple identifying the failure event.</param>
/// <param name="errorMessage">Localized failure message.</param>
/// <param name="extendedErrorInfo">Extended error detail, or null.</param>
public void SetBroken(ExEventLog.EventTuple eventTuple, LocalizedString errorMessage, ExtendedErrorInfo extendedErrorInfo) { this.SetState(FailureInfo.FailureFlags.Failed, new uint?(DiagCore.GetEventViewerEventId(eventTuple)), errorMessage, extendedErrorInfo); }
// Token: 0x06000D07 RID: 3335 RVA: 0x000396C2 File Offset: 0x000378C2
/// <summary>
/// Marks the failure info as Failed with an explicit (optional) event id,
/// delegating to <c>SetState</c>.
/// </summary>
/// <param name="eventId">Event-viewer event id, or null when none applies.</param>
/// <param name="errorMessage">Localized failure message.</param>
/// <param name="extendedErrorInfo">Extended error detail, or null.</param>
public void SetBroken(uint?eventId, LocalizedString errorMessage, ExtendedErrorInfo extendedErrorInfo) { this.SetState(FailureInfo.FailureFlags.Failed, eventId, errorMessage, extendedErrorInfo); }