// Clearing a collection whose status flag was previously reset should
// mark the collection as dirty again.
public void Clear_IsDirty()
{
    // Arrange
    var sut = new DocumentCollection<SubDocument>()
    {
        new SubDocument()
    };
    sut.ClearStatus();

    // Act
    sut.Clear();
    var dirty = sut.IsDirty;

    // Assert
    Assert.IsTrue(dirty);
}
// Verifies that Clear() flips IsDirty back to true after the dirty
// status has been explicitly cleared.
public void Clear_IsDirty()
{
    // Arrange: one document, then reset the dirty flag.
    var documents = new DocumentCollection<SubDocument>
    {
        new SubDocument()
    };
    documents.ClearStatus();

    // Act: clearing the contents is itself a mutation.
    documents.Clear();

    // Assert
    Assert.IsTrue(documents.IsDirty);
}
/// <summary>
/// Executes the engine. This is the primary method that kicks off generation.
/// </summary>
/// <remarks>
/// Runs every configured pipeline in order, timing the whole run and each
/// pipeline individually. Aborts early (with an error trace) if no pipelines
/// are configured. Any exception from a pipeline is logged and rethrown,
/// so a single failing pipeline stops the entire execution.
/// </remarks>
public void Execute()
{
    CheckDisposed();
    Trace.Information($"Using {JsEngineSwitcher.Instance.DefaultEngineName} as the JavaScript engine");

    // Make sure we've actually configured some pipelines
    if (_pipelines.Count == 0)
    {
        Trace.Error("No pipelines are configured. Please supply a configuration file, specify a recipe, or configure programmatically");
        return;
    }

    // Do a check for the same input/output path
    // (reading and writing the same folder can cause unexpected behavior)
    if (FileSystem.InputPaths.Any(x => x.Equals(FileSystem.OutputPath)))
    {
        Trace.Warning("The output path is also one of the input paths which can cause unexpected behavior and is usually not advised");
    }

    CleanTempPath();

    // Clean the output folder if requested
    if (Settings.Bool(Keys.CleanOutputPath))
    {
        CleanOutputPath();
    }

    try
    {
        System.Diagnostics.Stopwatch engineStopwatch = System.Diagnostics.Stopwatch.StartNew();
        using (Trace.WithIndent().Information("Executing {0} pipelines", _pipelines.Count))
        {
            // Setup (clear the document collection and reset cache counters)
            DocumentCollection.Clear();
            ExecutionCacheManager.ResetEntryHits();

            // Enumerate pipelines and execute each in order.
            // A single executionId is shared by all pipelines in this run.
            Guid executionId = Guid.NewGuid();
            int c = 1; // 1-based counter used for "(n/total)" progress traces
            foreach (IPipeline pipeline in _pipelines.Pipelines)
            {
                string pipelineName = pipeline.Name;
                System.Diagnostics.Stopwatch pipelineStopwatch = System.Diagnostics.Stopwatch.StartNew();
                using (Trace.WithIndent().Information("Executing pipeline \"{0}\" ({1}/{2}) with {3} child module(s)", pipelineName, c, _pipelines.Count, pipeline.Count))
                {
                    try
                    {
                        ((ExecutionPipeline)pipeline).Execute(this, executionId);
                        pipelineStopwatch.Stop();
                        Trace.Information(
                            "Executed pipeline \"{0}\" ({1}/{2}) in {3} ms resulting in {4} output document(s)",
                            pipelineName,
                            c++, // only incremented on success, so c-1 == pipelines completed
                            _pipelines.Count,
                            pipelineStopwatch.ElapsedMilliseconds,
                            DocumentCollection.FromPipeline(pipelineName).Count());
                    }
                    catch (Exception)
                    {
                        // Identify the failing pipeline, then let the outer handler log and rethrow
                        Trace.Error("Error while executing pipeline {0}", pipelineName);
                        throw;
                    }
                }
            }

            // Clean up (clear unhit cache entries, dispose documents)
            // Note that disposing the documents immediately after engine execution will ensure write streams get flushed and released
            // but will also mean that callers (and tests) can't access documents and document content after the engine finishes
            // Easiest way to access content after engine execution is to add a final Meta module and copy content to metadata
            ExecutionCacheManager.ClearUnhitEntries();
            foreach (IPipeline pipeline in _pipelines.Pipelines)
            {
                ((ExecutionPipeline)pipeline).ResetClonedDocuments();
            }

            engineStopwatch.Stop();
            Trace.Information(
                "Executed {0}/{1} pipelines in {2} ms",
                c - 1, // number of pipelines that completed successfully
                _pipelines.Count,
                engineStopwatch.ElapsedMilliseconds);
        }
    }
    catch (Exception ex)
    {
        Trace.Critical("Exception during execution: {0}", ex.ToString());
        throw; // preserve the original stack trace
    }
}
/// <summary>
/// Executes all configured pipelines in order, configuring the engine with
/// defaults first if no configuration has been applied.
/// </summary>
/// <remarks>
/// Ensures the input and output folders exist before running. Unlike other
/// engine variants, there is no per-pipeline catch here: the first pipeline
/// exception unwinds straight to the outer handler, which logs (verbose) and
/// rethrows.
/// </remarks>
public void Execute()
{
    CheckDisposed();

    // Configure with defaults if not already configured
    if (_configurator == null)
    {
        Configure();
    }

    // Create the input and output folders if they don't already exist
    if (!Directory.Exists(InputFolder))
    {
        Directory.CreateDirectory(InputFolder);
    }
    if (!Directory.Exists(OutputFolder))
    {
        Directory.CreateDirectory(OutputFolder);
    }

    try
    {
        Stopwatch engineStopwatch = Stopwatch.StartNew();
        using (Trace.WithIndent().Information("Executing {0} pipelines", _pipelines.Count))
        {
            // Setup (clear the document collection and reset cache counters)
            DocumentCollection.Clear();
            ExecutionCacheManager.ResetEntryHits();

            // Enumerate pipelines and execute each in order
            int c = 1; // 1-based counter used for "(n/total)" progress traces
            foreach (Pipeline pipeline in _pipelines.Pipelines)
            {
                Stopwatch pipelineStopwatch = Stopwatch.StartNew();
                using (Trace.WithIndent().Information("Executing pipeline \"{0}\" ({1}/{2}) with {3} child module(s)", pipeline.Name, c, _pipelines.Count, pipeline.Count))
                {
                    pipeline.Execute();
                    pipelineStopwatch.Stop();
                    Trace.Information("Executed pipeline \"{0}\" ({1}/{2}) in {3} ms resulting in {4} output document(s)", pipeline.Name, c++, _pipelines.Count, pipelineStopwatch.ElapsedMilliseconds, DocumentCollection.FromPipeline(pipeline.Name).Count());
                }
            }

            // Clean up (clear unhit cache entries, dispose documents)
            // Note that disposing the documents immediately after engine execution will ensure write streams get flushed and released
            // but will also mean that callers (and tests) can't access documents and document content after the engine finishes
            // Easiest way to access content after engine execution is to add a final Meta module and copy content to metadata
            ExecutionCacheManager.ClearUnhitEntries(this);
            foreach (Pipeline pipeline in _pipelines.Pipelines)
            {
                pipeline.ResetClonedDocuments();
            }

            engineStopwatch.Stop();
            // NOTE(review): this reports the configured pipeline count, not the
            // completed count (c - 1) used by other engine variants — confirm
            // intent, since on failure this summary is skipped entirely anyway.
            Trace.Information("Executed {0} pipelines in {1} ms", _pipelines.Count, engineStopwatch.ElapsedMilliseconds);
        }
    }
    catch (Exception ex)
    {
        // Logged at Verbose (not Critical) in this variant; caller sees the rethrow
        Trace.Verbose("Exception while executing pipelines: {0}", ex);
        throw; // preserve the original stack trace
    }
}
/// <summary>
/// Executes all configured pipelines in order, applying default configuration
/// first if the engine has not been configured yet.
/// </summary>
/// <remarks>
/// Optionally cleans the output path (per <c>Settings.CleanOutputPath</c>) and
/// ensures the output directory exists before running. Each pipeline is timed
/// individually; a pipeline exception is traced with the pipeline name and
/// rethrown, which aborts the remaining pipelines.
/// </remarks>
public void Execute()
{
    CheckDisposed();

    // Configure with defaults if not already configured
    if (!_config.Configured)
    {
        Configure();
    }

    // Clean the output folder if requested
    if (Settings.CleanOutputPath)
    {
        CleanOutputPath();
    }

    // Create the output folder if it doesn't already exist
    IDirectory outputDirectory = FileSystem.GetOutputDirectory();
    if (!outputDirectory.Exists)
    {
        outputDirectory.Create();
    }

    try
    {
        System.Diagnostics.Stopwatch engineStopwatch = System.Diagnostics.Stopwatch.StartNew();
        using (Trace.WithIndent().Information("Executing {0} pipelines", _pipelines.Count))
        {
            // Setup (clear the document collection and reset cache counters)
            DocumentCollection.Clear();
            ExecutionCacheManager.ResetEntryHits();

            // Enumerate pipelines and execute each in order
            int c = 1; // 1-based counter used for "(n/total)" progress traces
            foreach (Pipeline pipeline in _pipelines.Pipelines)
            {
                string pipelineName = pipeline.Name;
                System.Diagnostics.Stopwatch pipelineStopwatch = System.Diagnostics.Stopwatch.StartNew();
                using (Trace.WithIndent().Information("Executing pipeline \"{0}\" ({1}/{2}) with {3} child module(s)", pipelineName, c, _pipelines.Count, pipeline.Count))
                {
                    try
                    {
                        pipeline.Execute(this);
                        pipelineStopwatch.Stop();
                        // c is only incremented on success, so c - 1 == pipelines completed
                        Trace.Information("Executed pipeline \"{0}\" ({1}/{2}) in {3} ms resulting in {4} output document(s)", pipelineName, c++, _pipelines.Count, pipelineStopwatch.ElapsedMilliseconds, DocumentCollection.FromPipeline(pipelineName).Count());
                    }
                    catch (Exception)
                    {
                        // Identify the failing pipeline, then let the outer handler log and rethrow
                        Trace.Error("Error while executing pipeline {0}", pipelineName);
                        throw;
                    }
                }
            }

            // Clean up (clear unhit cache entries, dispose documents)
            // Note that disposing the documents immediately after engine execution will ensure write streams get flushed and released
            // but will also mean that callers (and tests) can't access documents and document content after the engine finishes
            // Easiest way to access content after engine execution is to add a final Meta module and copy content to metadata
            ExecutionCacheManager.ClearUnhitEntries();
            foreach (Pipeline pipeline in _pipelines.Pipelines)
            {
                pipeline.ResetClonedDocuments();
            }

            engineStopwatch.Stop();
            Trace.Information("Executed {0}/{1} pipelines in {2} ms", c - 1, _pipelines.Count, engineStopwatch.ElapsedMilliseconds);
        }
    }
    catch (Exception ex)
    {
        Trace.Critical("Exception during execution: {0}", ex.ToString());
        throw; // preserve the original stack trace
    }
}