public void When_Log()
{
    ILogger logger = new BlobLogger("logtest", "");

    logger.LogDebug("hello1");
    logger.LogDebug("hello2");
    logger.LogDebug("hello3");
}
/// <summary>
/// Loads stateful entities.
/// </summary>
/// <typeparam name="TEntity">The type of the entity.</typeparam>
/// <param name="category">The category.</param>
/// <param name="stateCategory">The state category.</param>
/// <param name="kind">The reactive entity kind.</param>
/// <param name="onLoading">The function is called for each loading entity.</param>
/// <param name="onError">Function to report an error.</param>
/// <param name="blobLogger">The blob logger to write raw recovery blobs to.</param>
/// <param name="token">Cancellation token.</param>
private void LoadStatefulEntities<TEntity>(
    string category,
    string stateCategory,
    ReactiveEntityKind kind,
    Action<TEntity, Stream> onLoading,
    Action<string, TEntity, Exception> onError,
    BlobLogger blobLogger,
    CancellationToken token)
    where TEntity : ReactiveEntity
{
    Debug.Assert(!string.IsNullOrEmpty(stateCategory), "Category should not be null or empty.");
    Debug.Assert(onLoading != null, "onLoading should not be null.");

    LoadDefinitions<TEntity>(category, kind, entity =>
    {
        var key = entity.Uri.ToCanonicalString();

        var stopwatch = Stopwatch.StartNew();

        if (!_reader.TryGetItemReader(stateCategory, key, out Stream stateStream))
        {
            // Stateless entity, i.e. no state has been written yet.
            // At the very least, there will always be a header if anything has been written.
            stateStream = null;
        }
        else
        {
            entity.SetMetric(EntityMetric.ReadState, stopwatch.Elapsed);

            blobLogger.Append(stateCategory, key, stateStream);
        }

        using (stateStream) // notice null is fine for a C# using statement
        {
            try
            {
                onLoading(entity, stateStream);
            }
            catch (MitigationBailOutException)
            {
                throw;
            }
            catch (Exception ex)
            {
                _engine.Parent.TraceSource.Recovery_LoadingStateFailure(_engine.Parent.Uri, category, stateCategory, key, ex.Message);
                throw;
            }
        }
    }, onError, blobLogger, token);
}
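The `using (stateStream)` line above works because C# allows the resource in a using statement to be null; Dispose is simply skipped. A minimal standalone sketch of that behavior (the type and method names here are illustrative, not part of the engine):

using System.IO;

static class NullUsingExample
{
    // 'stream' may be null; the using statement simply skips Dispose in that case.
    static void ReadIfPresent(Stream stream)
    {
        using (stream) // null is fine here, mirroring the recovery code above
        {
            if (stream == null)
            {
                // Stateless case: nothing to read.
                return;
            }

            _ = stream.ReadByte();
        }
    }
}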
/// <summary>
/// Loads the state.
/// </summary>
public void Load(CancellationToken token)
{
    token.ThrowIfCancellationRequested();

    Debug.Assert(_registry != null, "Registry should not be null.");
    Debug.Assert(_reader != null, "Reader should not be null.");

    var unhandled = new ConcurrentBag<Exception>();

    using (var blobLogger = new BlobLogger(_engine, token))
    {
        Load(blobLogger, unhandled.Add, token);
        Start(unhandled.Add, token);
        Summarize();
    }

    if (!unhandled.IsEmpty)
    {
        throw new AggregateException(unhandled);
    }
}
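Load follows a common .NET pattern: failures from the parallel load and start phases are collected into a ConcurrentBag&lt;Exception&gt; and surfaced as a single AggregateException after all phases have run. A minimal sketch of that pattern in isolation (the work items are placeholders, not engine code):

using System;
using System.Collections.Concurrent;
using System.Threading.Tasks;

static class CollectErrorsExample
{
    static void RunAll(Action[] workItems)
    {
        var unhandled = new ConcurrentBag<Exception>();

        // Run every item, recording failures instead of stopping at the first one.
        Parallel.ForEach(workItems, item =>
        {
            try
            {
                item();
            }
            catch (Exception ex)
            {
                unhandled.Add(ex);
            }
        });

        // Surface all recorded failures to the caller as one exception.
        if (!unhandled.IsEmpty)
        {
            throw new AggregateException(unhandled);
        }
    }
}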
static void Main(string[] args)
{
    var constr = "<your blob connection string here>";
    var container = "logs";
    var blobName = "test.log";

    using (var logger = new BlobLogger(constr, container, blobName))
    {
        logger.Append("TEXT log test.", LogLevel.Text);
        logger.Append("DEBUG log test.", LogLevel.Debug);
        logger.Append("INFO log test.", LogLevel.Information);
        logger.Append("WARN log test.", LogLevel.Warning);
        logger.Append("ERROR log test.", LogLevel.Error);
        logger.Append("FATAL log test.", LogLevel.Fatal);

        logger.Flush().Wait();

        try
        {
            var b = 0;
            var a = 1 / b;
        }
        catch (Exception ex)
        {
            logger.Append(ex, LogLevel.Error);
        }

        try
        {
            var c = new[] { 1, 2, 3 };
            var d = c[3];
        }
        catch (Exception ex)
        {
            logger.Append(ex, LogLevel.Error);
        }

        logger.Append(new Exception("System Exception Test"), LogLevel.Error);

        // Flush concurrently from two callers to exercise thread safety.
        Parallel.Invoke(
            () => logger.Flush().Wait(),
            () => logger.Flush().Wait());
    }
}
private void Load(BlobLogger blobLogger, Action<Exception> unhandled, CancellationToken token)
{
    var sw = Stopwatch.StartNew();

    var trace = _engine.Parent.TraceSource;

    var onError = new Func<ReactiveEntityKind, Action<string, IReactiveResource, Exception>>(kind => (key, entity, ex) =>
    {
        HandleError(kind, key, entity, ex, unhandled);
    });

    trace.Recovery_LoadStarted(_engine.Parent.Uri);

    LoadDefinitions<OtherDefinitionEntity>(Category.Templates, ReactiveEntityKind.Other, template =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Other | TraceNoun.Definition, template.Uri, () =>
        {
            Add(_engine.Registry.Other, template);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Other, template);

            template.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.Other), blobLogger, token);

    LoadDefinitions<OtherDefinitionEntity>(Category.Templates, ReactiveEntityKind.Template, template =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Template | TraceNoun.Definition, template.Uri, () =>
        {
            Add(_engine.Registry.Templates, template);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Templates, template);

            template.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.Template), blobLogger, token);

    LoadDefinitions<ObserverDefinitionEntity>(Category.Observers, ReactiveEntityKind.Observer, observer =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Observer | TraceNoun.Definition, observer.Uri, () =>
        {
            _engine.DefineObserverCore(observer);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Observers, observer);

            observer.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.Observer), blobLogger, token);

    LoadDefinitions<ObservableDefinitionEntity>(Category.Observables, ReactiveEntityKind.Observable, observable =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Observable | TraceNoun.Definition, observable.Uri, () =>
        {
            _engine.DefineObservableCore(observable);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Observables, observable);

            observable.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.Observable), blobLogger, token);

    LoadDefinitions<StreamFactoryDefinitionEntity>(Category.SubjectFactories, ReactiveEntityKind.StreamFactory, factory =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.SubjectFactory | TraceNoun.Definition, factory.Uri, () =>
        {
            _engine.DefineSubjectFactoryCore(factory);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.SubjectFactories, factory);

            factory.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.StreamFactory), blobLogger, token);

    LoadDefinitions<SubscriptionFactoryDefinitionEntity>(Category.SubscriptionFactories, ReactiveEntityKind.SubscriptionFactory, factory =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.SubscriptionFactory | TraceNoun.Definition, factory.Uri, () =>
        {
            _engine.DefineSubscriptionFactoryCore(factory);

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.SubscriptionFactories, factory);

            factory.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.SubscriptionFactory), blobLogger, token);

    LoadStatefulEntities<SubjectEntity>(Category.Subjects, Category.SubjectsRuntimeState, ReactiveEntityKind.Stream, (subject, stream) =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Subject | TraceNoun.State, subject.Uri, () =>
        {
            if (stream == null)
            {
                trace.Recovery_LoadSubjectWithoutState(subject.Uri, _engine.Parent.Uri);

                _engine.CreateStreamCore(subject, state: null, recovering: true);
            }
            else
            {
                var policy = _engine.Parent._serializationPolicy;

                using var stateReaderFactory = new OperatorStateReaderFactory(stream, policy);

                stateReaderFactory.ReadHeader();
                _engine.CreateStreamCore(subject, state: stateReaderFactory, recovering: true);
                stateReaderFactory.ReadFooter();
            }

            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Subjects, subject);

            subject.AdvanceState(TransactionState.Active);
        });
    }, onError(ReactiveEntityKind.Stream), blobLogger, token);

    LoadDefinitions<ReliableSubscriptionEntity>(Category.ReliableSubscriptions, ReactiveEntityKind.ReliableSubscription, reliableSub =>
    {
        // Empty - start happens in the next stage; just populating the registry here.

        // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
        Add(_registry.ReliableSubscriptions, reliableSub);

        reliableSub.AdvanceState(TransactionState.Active);
    }, onError(ReactiveEntityKind.ReliableSubscription), blobLogger, token);

    LoadStatefulEntities<SubscriptionEntity>(Category.Subscriptions, Category.SubscriptionsRuntimeState, ReactiveEntityKind.Subscription, (subscription, stream) =>
    {
        StateOperationTracer.Trace(trace, TraceVerb.Load, TraceNoun.Subscription | TraceNoun.State, subscription.Uri, () =>
        {
            //
            // Notice we add the entity to the registry first. This enables the case where
            // the Regenerate mitigation is used during load failures, resulting in a fresh
            // entity in the proper QE registry, which we need to see for the subsequent
            // call to Start made by the next phase of recovery using the shadow registry.
            //
            // This has one side-effect though, namely that we can end up with an entity in
            // the shadow registry that we fail to load, but yet we'll attempt to start it.
            // Right now, this works out fine because the entity's Instance property will
            // not be set if the load phase fails, so the Start phase will skip it. Going
            // forward we should mediate all state changes through the entities, such that
            // we can keep track of state transitions. Failure to load would leave it in a
            // LoadFailed state, preventing a Start request from transitioning to Starting
            // and Started. With this, we may be able to move mitigations outside the core
            // code path as well and use a registry artifact analysis to find artifacts that
            // are in various *Failed states.
            //
            // TODO: add more phasing so we can populate the shadow registry first, check consistency, define/create, check more consistency, start
            Add(_registry.Subscriptions, subscription);

            subscription.AdvanceState(TransactionState.Active);

            if (stream == null)
            {
                trace.Recovery_LoadSubscriptionWithoutState(subscription.Uri, _engine.Parent.Uri);

                _engine.CreateSubscriptionCore(subscription, state: null, recovering: true);
            }
            else
            {
                var policy = _engine.Parent._serializationPolicy;

                using var stateReaderFactory = new OperatorStateReaderFactory(stream, policy);

                stateReaderFactory.ReadHeader();
                _engine.CreateSubscriptionCore(subscription, state: stateReaderFactory, recovering: true);
                stateReaderFactory.ReadFooter();
            }
        });
    }, onError(ReactiveEntityKind.Subscription), blobLogger, token);

    trace.Recovery_LoadCompleted(_engine.Parent.Uri, sw.ElapsedMilliseconds);
}
/// <summary>
/// Loads the definitions.
/// </summary>
/// <typeparam name="TEntity">The type of the entity.</typeparam>
/// <param name="category">The category.</param>
/// <param name="kind">Reactive entity kind.</param>
/// <param name="onLoading">The function is called for each loading entity.</param>
/// <param name="onError">Function to report an error.</param>
/// <param name="blobLogger">The blob logger to write raw recovery blobs to.</param>
/// <param name="token">Cancellation token.</param>
private void LoadDefinitions<TEntity>(
    string category,
    ReactiveEntityKind kind,
    Action<TEntity> onLoading,
    Action<string, TEntity, Exception> onError,
    BlobLogger blobLogger,
    CancellationToken token)
    where TEntity : ReactiveEntity
{
    Debug.Assert(!string.IsNullOrEmpty(category), "Category should not be null or empty.");
    Debug.Assert(onLoading != null, "onLoading should not be null.");

    var trace = _engine.Parent.TraceSource;

    trace.Recovery_LoadingDefinitionsStarted(_engine.Parent.Uri, category);

    if (!_reader.TryGetItemKeys(category, out IEnumerable<string> entities))
    {
        entities = Array.Empty<string>();
    }

    var total = 0;
    var failed = 0;

    Parallel.ForEach(
        entities,
        new ParallelOptions
        {
            MaxDegreeOfParallelism = _engine.Parent.Options.RecoveryDegreeOfParallelism,
            TaskScheduler = RecoveryScheduler.Default,
            CancellationToken = token,
        },
        key =>
        {
            var entity = default(TEntity);

            try
            {
                var stopwatch = Stopwatch.StartNew();

                if (!_reader.TryGetItemReader(category, key, out Stream stream))
                {
                    throw new InvalidOperationException(string.Format(CultureInfo.InvariantCulture, "No items in key '{0}' for category '{1}'.", key, category));
                }

                var elapsedReading = stopwatch.Elapsed;

                blobLogger.Append(category, key, stream);

                var policy = _engine.Parent._serializationPolicy;

                stopwatch.Restart();

                using (var reader = new EntityReader(stream, _engine.Registry, policy))
                {
                    reader.ReadHeader();

                    _engine.TryMitigate(
                        () => entity = (TEntity)reader.Load(kind),
                        ReactiveEntity.CreateInvalidInstance(new Uri(key), kind),
                        true,
                        _placeholderMitigator);

                    reader.ReadFooter();
                }

                var elapsedLoading = stopwatch.Elapsed;

                entity.SetMetric(EntityMetric.ReadEntity, elapsedReading);
                entity.SetMetric(EntityMetric.LoadEntity, elapsedLoading);

                onLoading(entity);
            }
            catch (MitigationBailOutException) { }
#pragma warning disable CA1031 // Do not catch general exception types. (By design; mitigation callback is the "handler".)
            catch (Exception ex)
            {
                Interlocked.Increment(ref failed);

                trace.Recovery_LoadingDefinitionsFailure(_engine.Parent.Uri, category, key, ex.Message);

                onError(key, entity, ex);
            }
#pragma warning restore CA1031

            Interlocked.Increment(ref total);
        });

    token.ThrowIfCancellationRequested();

    trace.Recovery_LoadingDefinitionsCompleted(_engine.Parent.Uri, category, total, failed);
}
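The recovery loop above bounds its parallelism through ParallelOptions and tracks per-key outcomes with Interlocked counters. The same skeleton, stripped of the engine-specific reader and mitigation machinery, looks roughly like this (the key list, per-key work, and degreeOfParallelism parameter are stand-ins):

using System;
using System.Threading;
using System.Threading.Tasks;

static class BoundedParallelLoadExample
{
    static (int total, int failed) LoadAll(string[] keys, int degreeOfParallelism, CancellationToken token)
    {
        var total = 0;
        var failed = 0;

        Parallel.ForEach(
            keys,
            new ParallelOptions
            {
                MaxDegreeOfParallelism = degreeOfParallelism,
                CancellationToken = token,
            },
            key =>
            {
                try
                {
                    // Placeholder for per-key work (read blob, deserialize, register).
                    if (key.Length == 0)
                    {
                        throw new InvalidOperationException("Empty key.");
                    }
                }
                catch (Exception)
                {
                    // Counters are shared across worker threads, hence Interlocked.
                    Interlocked.Increment(ref failed);
                }

                Interlocked.Increment(ref total);
            });

        return (total, failed);
    }
}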