/// <summary>
/// Expands any <c>VariablesTemplateReference</c> entries in <paramref name="variables"/> in place,
/// replacing each reference with the variables defined by the referenced template file.
/// </summary>
/// <param name="variables">Variable list to mutate; may be null (treated as empty).</param>
/// <param name="defaultRoot">Directory used to resolve relative template paths.</param>
/// <param name="cancellationToken">Propagated to template-file loading.</param>
/// <param name="fileCount">Running count of loaded files, used to enforce the max-file limit.</param>
private void ResolveTemplates(IList<IVariable> variables, String defaultRoot, CancellationToken cancellationToken, ref Int32 fileCount)
{
    // 'i' is advanced manually: past inserted items after an expansion, or not at
    // all when a reference expands to nothing (the next element shifts into slot i).
    for (int i = 0; i < (variables?.Count ?? 0);)
    {
        if (variables[i] is VariablesTemplateReference)
        {
            // Load the template.
            var reference = variables[i] as VariablesTemplateReference;
            String templateFilePath = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: reference.Name);
            PipelineFile<VariablesTemplate> templateFile = LoadFile<VariablesTemplate, VariablesTemplateConverter>(templateFilePath, reference.Parameters, cancellationToken, ref fileCount);
            VariablesTemplate template = templateFile.Object;

            // Merge the template. Insert at an advancing index so the template's
            // variables keep their declared order (inserting every item at the
            // same index i would reverse them — and reverse of what the
            // List<IJob> overload's InsertRange does).
            variables.RemoveAt(i);
            if (template.Variables != null)
            {
                Int32 insertAt = i;
                foreach (IVariable variable in template.Variables)
                {
                    variables.Insert(insertAt++, variable);
                }

                i += template.Variables.Count;
            }
        }
        else
        {
            i++;
        }
    }
}
/// <summary>
/// Expands any <c>StepsTemplateReference</c> entries in <paramref name="steps"/> in place,
/// replacing each reference with the steps defined by the referenced template file
/// (after applying the reference's step overrides to the template).
/// </summary>
/// <param name="steps">Step list to mutate; may be null (treated as empty).</param>
/// <param name="defaultRoot">Directory used to resolve relative template paths.</param>
/// <param name="cancellationToken">Propagated to template-file loading.</param>
/// <param name="fileCount">Running count of loaded files, used to enforce the max-file limit.</param>
private void ResolveTemplates(IList<IStep> steps, String defaultRoot, CancellationToken cancellationToken, ref Int32 fileCount)
{
    for (int i = 0; i < (steps?.Count ?? 0);)
    {
        if (steps[i] is StepsTemplateReference)
        {
            // Load the template.
            var reference = steps[i] as StepsTemplateReference;
            String templateFilePath = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: reference.Name);
            PipelineFile<StepsTemplate> templateFile = LoadFile<StepsTemplate, StepsTemplateConverter>(templateFilePath, reference.Parameters, cancellationToken, ref fileCount);
            StepsTemplate template = templateFile.Object;

            // Merge the template.
            ApplyStepOverrides(reference.StepOverrides, template.Steps);
            steps.RemoveAt(i);
            if (template.Steps != null)
            {
                // Insert at an advancing index so the template's steps keep
                // their declared order. Inserting every item at the same index
                // i reverses them, which would reorder step execution.
                Int32 insertAt = i;
                foreach (IStep step in template.Steps)
                {
                    steps.Insert(insertAt++, step);
                }

                i += template.Steps.Count;
            }
        }
        else
        {
            i++;
        }
    }
}
/// <summary>
/// Expands any <c>PhasesTemplateReference</c> entries in <paramref name="phases"/> in place,
/// and recursively resolves template references nested inside non-template phases.
/// </summary>
/// <param name="phases">Phase list to mutate; may be null (treated as empty).</param>
/// <param name="defaultRoot">Directory used to resolve relative template paths.</param>
/// <param name="cancellationToken">Propagated to template-file loading.</param>
/// <param name="fileCount">Running count of loaded files, used to enforce the max-file limit.</param>
private void ResolveTemplates(IList<IPhase> phases, String defaultRoot, CancellationToken cancellationToken, ref Int32 fileCount)
{
    for (int i = 0; i < (phases?.Count ?? 0);)
    {
        if (phases[i] is PhasesTemplateReference)
        {
            // Load the template.
            var reference = phases[i] as PhasesTemplateReference;
            String templateFilePath = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: reference.Name);
            PipelineFile<PhasesTemplate> templateFile = LoadFile<PhasesTemplate, PhasesTemplateConverter>(templateFilePath, reference.Parameters, cancellationToken, ref fileCount);
            PhasesTemplate template = templateFile.Object;

            // Resolve template references within the template. Relative paths
            // inside the template resolve against the template's own directory.
            if (template.Steps != null)
            {
                ResolveTemplates(template.Steps, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
            }

            // Merge the template.
            ApplyStepOverrides(reference, template);
            phases.RemoveAt(i);
            if (template.Phases != null)
            {
                // Insert at an advancing index so the template's phases keep
                // their declared order (inserting every item at the same index
                // i would reverse them).
                Int32 insertAt = i;
                foreach (IPhase phase in template.Phases)
                {
                    phases.Insert(insertAt++, phase);
                }

                i += template.Phases.Count;
            }
            else if (template.Steps != null)
            {
                // A steps-only template becomes a single implied phase.
                var newPhase = new Phase { Steps = template.Steps };
                phases.Insert(i, newPhase);
                i++;
            }
        }
        else
        {
            // Resolve nested template references.
            var phase = phases[i] as Phase;
            if (phase.Variables != null)
            {
                ResolveTemplates(phase.Variables, defaultRoot, cancellationToken, ref fileCount);
            }

            if (phase.Steps != null)
            {
                ResolveTemplates(phase.Steps, defaultRoot, cancellationToken, ref fileCount);
            }

            i++;
        }
    }
}
// A pipeline file with no "key=value" lines must expose an empty variable set.
public void returns_expected_variables_when_none_has_been_defined()
{
    // Arrange: a minimal file — one step with a single action, no variables.
    string[] lines = { "dummy:", " dummy" };

    // Act
    var pipeline = PipelineFile.Parse(lines);

    // Assert
    Assert.Empty(pipeline.Variables);
}
// The text before the ':' on a step line becomes the step's name.
public void parse_simple_step_returns_expected_name_of_step(string expected)
{
    string[] lines = { expected + ":", " dummy" };

    var pipeline = PipelineFile.Parse(lines);

    Assert.Equal(new[] { expected }, pipeline.Steps.Select(step => step.Name));
}
// A step that declares a pre-step after ':' still carries its own action list.
public void steps_with_pre_step_can_have_actions()
{
    var pipeline = PipelineFile.Parse(new[]
    {
        "foo: dummy",
        " bar",
    });

    var actions = pipeline.Steps.Single().Actions;

    Assert.Equal(new[] { "bar" }, actions);
}
// Several space-separated names after ':' are all recorded as pre-steps.
public void steps_can_also_have_multiple_pre_steps()
{
    var pipeline = PipelineFile.Parse(new[]
    {
        "foo: bar baz qux",
        " dummy",
    });

    var preSteps = pipeline.Steps.Single().PreStepNames;

    Assert.Equal(new[] { "bar", "baz", "qux" }, preSteps);
}
// Keeps only the N newest files in 'path' (N read from the "FileNumber"
// configuration key) and deletes the rest. 'formFile' is unused; the parameter
// exists to satisfy the pipeline-step signature.
public void Execute(string path, PipelineFile formFile)
{
    int keepCount = int.Parse(_configuration["FileNumber"]);

    // Newest first; skipping the first keepCount leaves only the stale files.
    var staleFiles = Directory.GetFiles(path)
        .OrderByDescending(File.GetCreationTime)
        .Skip(keepCount);

    foreach (var staleFile in staleFiles)
    {
        if (File.Exists(staleFile))
        {
            File.Delete(staleFile);
        }
    }
}
/// <summary>
/// Resolves the process-level template reference, if one is present, by loading the
/// referenced template, resolving references inside it, and replacing the process's
/// content with the template's content. Otherwise recurses into the process's
/// nested collections to resolve their template references in place.
/// </summary>
/// <param name="process">The process to resolve; mutated in place.</param>
/// <param name="defaultRoot">Directory used to resolve relative template paths.</param>
/// <param name="cancellationToken">Propagated to template-file loading.</param>
/// <param name="fileCount">Running count of loaded files, used to enforce the max-file limit.</param>
private void ResolveTemplates(Process process, String defaultRoot, CancellationToken cancellationToken, ref Int32 fileCount)
{
    if (process.Template != null)
    {
        // Load the template.
        String templateFilePath = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: process.Template.Name);
        PipelineFile<ProcessTemplate> templateFile = LoadFile<ProcessTemplate, ProcessTemplateConverter>(templateFilePath, process.Template.Parameters, cancellationToken, ref fileCount);
        ProcessTemplate template = templateFile.Object;

        // Resolve template references within the template. Phases/Jobs/Steps are
        // treated as mutually exclusive; relative paths inside the template
        // resolve against the template's own directory.
        if (template.Phases != null)
        {
            ResolveTemplates(template.Phases, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
        }
        else if (template.Jobs != null)
        {
            ResolveTemplates(template.Jobs, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
        }
        else if (template.Steps != null)
        {
            ResolveTemplates(template.Steps, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
        }

        // Merge the template: the template's content replaces the process's
        // content wholesale, resources are merged, and the reference is cleared
        // so the process is fully resolved on return.
        ApplyStepOverrides(process.Template, template);
        process.Phases = template.Phases;
        process.Jobs = template.Jobs;
        process.Steps = template.Steps;
        process.Resources = MergeResources(process.Resources, template.Resources);
        process.Template = null;
    }
    // Resolve nested template references.
    // NOTE(review): this is an else-if chain, so when Phases (or Jobs) is non-null,
    // process.Variables is never resolved here — confirm a process cannot carry
    // both, otherwise variable template references would be silently skipped.
    else if (process.Phases != null)
    {
        ResolveTemplates(process.Phases, defaultRoot, cancellationToken, ref fileCount);
    }
    else if (process.Jobs != null)
    {
        ResolveTemplates(process.Jobs, defaultRoot, cancellationToken, ref fileCount);
    }
    else if (process.Variables != null)
    {
        ResolveTemplates(process.Variables, defaultRoot, cancellationToken, ref fileCount);
    }
    else if (process.Steps != null)
    {
        ResolveTemplates(process.Steps, defaultRoot, cancellationToken, ref fileCount);
    }
}
// An indented line under a step becomes that step's (single) action.
public void parse_simple_step_with_single_action_returns_expected_action(string expected)
{
    var pipeline = PipelineFile.Parse(new[] { "dummy:", $" {expected}" });

    var step = Assert.Single(pipeline.Steps);
    Assert.Equal(new[] { expected }, step.Actions);
}
// Each theory value must round-trip unchanged through the variable parser.
public void valid_variable_values(string validValue)
{
    string[] lines = { $"foo={validValue}", "dummy:", " dummy" };

    var pipeline = PipelineFile.Parse(lines);

    var expectedVariables = new Dictionary<string, string> { ["foo"] = validValue };
    Assert.Equal(expectedVariables, pipeline.Variables);
}
// Whitespace handling around '=' is driven entirely by the theory data.
public void variables_can_have_spaces(string input, string expectedKey, string expectedValue)
{
    var pipeline = PipelineFile.Parse(new[] { input, "dummy:", " dummy" });

    var expectedVariables = new Dictionary<string, string> { [expectedKey] = expectedValue };
    Assert.Equal(expectedVariables, pipeline.Variables);
}
// Everything after '#' on a variable line is ignored.
public void variables_can_have_trailing_comments()
{
    string[] lines = { "foo=bar # this is a trailing comment!", "dummy:", " dummy" };

    var pipeline = PipelineFile.Parse(lines);

    Assert.Equal(new Dictionary<string, string> { ["foo"] = "bar" }, pipeline.Variables);
}
// A single "key=value" line before the first step defines one variable.
public void returns_expected_variables_when_single_has_been_defined()
{
    string[] lines = { "foo=bar", "dummy:", " dummy" };

    var pipeline = PipelineFile.Parse(lines);

    Assert.Equal(new Dictionary<string, string> { ["foo"] = "bar" }, pipeline.Variables);
}
// Multiple indented lines under one step accumulate into its action list, in order.
public void parse_simple_step_with_multiple_actions_returns_expected_actions()
{
    var pipeline = PipelineFile.Parse(new[] { "dummy:", " foo", " bar" });

    var step = Assert.Single(pipeline.Steps);
    Assert.Equal(new[] { "foo", "bar" }, step.Actions);
}
// Surrounding double quotes are stripped from a variable's value.
public void variable_values_can_be_quoted()
{
    var pipeline = PipelineFile.Parse(new[] { "foo=\"bar\"", "dummy:", " dummy" });

    Assert.Equal(new Dictionary<string, string> { ["foo"] = "bar" }, pipeline.Variables);
}
// Trailing blank-ish lines (theory-supplied) after a step must not add actions.
public void can_handle_empty_line_after_step_with_single_actions(string emptyLine)
{
    var pipeline = PipelineFile.Parse(new[] { "dummy:", " foo", emptyLine });

    var step = Assert.Single(pipeline.Steps);
    Assert.Equal(new[] { "foo" }, step.Actions);
}
// Deletes every file in 'path' whose (dot-free) extension differs from the
// required extension. 'formFile' is unused; the parameter exists to satisfy
// the pipeline-step signature.
public void Execute(string path, PipelineFile formFile)
{
    foreach (var candidate in Directory.GetFiles(path))
    {
        // Same comparison as before: the text after the final '.', with no
        // leading dot — e.g. "a.b.svg" yields "svg".
        var extension = candidate.Split(".").Last();
        if (extension == _requiredExtension)
        {
            continue;
        }

        if (File.Exists(candidate))
        {
            File.Delete(candidate);
        }
    }
}
// Lines ending in '\' continue onto the next line and join into one action.
public void can_handle_multi_line_action()
{
    var pipeline = PipelineFile.Parse(new[]
    {
        "foo:",
        @" bar \",
        @" baz \",
        @" qux",
    });

    var expectedSteps = new[] { new Step("foo", new[] { "bar baz qux" }) };
    Assert.Equal(expectedSteps, pipeline.Steps);
}
/// <summary>
/// Converts the uploaded file to PNM by shelling out to the external
/// <c>convert</c> executable, blocking until the conversion completes.
/// </summary>
/// <param name="path">Directory containing the input file and receiving the output.</param>
/// <param name="formFile">Provides the input file name and the output base name.</param>
public void Execute(string path, PipelineFile formFile)
{
    // NOTE(review): the argument string is not quoted, so this breaks if 'path'
    // or the file names contain spaces — confirm inputs are space-free or add quoting.
    var args = $"{Path.Combine(path, formFile.FileName)} {Path.Combine(path, $"{formFile.Name}.pnm")}";

    // Process is IDisposable; the original leaked the handle. Dispose it once
    // the external tool has exited.
    using (var process = new Process()
    {
        StartInfo = new ProcessStartInfo
        {
            FileName = "convert",
            Arguments = args,
            RedirectStandardOutput = true,
            UseShellExecute = false,
            CreateNoWindow = true,
        }
    })
    {
        process.Start();

        // Drain stdout before waiting: with RedirectStandardOutput the child can
        // deadlock if its output buffer fills while we block in WaitForExit.
        process.StandardOutput.ReadToEnd();
        process.WaitForExit();
    }
}
// Continuation lines ('\') and plain single-line actions can be interleaved.
public void can_handle_mix_of_single_and_multi_line_actions()
{
    var pipeline = PipelineFile.Parse(new[]
    {
        "foo:",
        @" bar",
        @" baz1 \",
        @" baz2",
        @" qux",
    });

    var expectedSteps = new[] { new Step("foo", new[] { "bar", "baz1 baz2", "qux" }) };
    Assert.Equal(expectedSteps, pipeline.Steps);
}
// A step line declaring only a pre-step — with no indented action lines — is valid.
public void steps_with_pre_step_is_NOT_required_to_have_actions()
{
    var pipeline = PipelineFile.Parse(new[] { "foo: bar" });

    var step = pipeline.Steps.Single();
    Assert.Equal("foo", step.Name);
    Assert.Equal(new[] { "bar" }, step.PreStepNames);
}
// A blank line separates two independent step definitions.
public void parse_multiple_simple_steps_with_single_action_returns_expected()
{
    var pipeline = PipelineFile.Parse(new[] { "foo:", " bar", "", "baz:", " qux" });

    var expectedSteps = new[]
    {
        new Step("foo", new[] { "bar" }),
        new Step("baz", new[] { "qux" }),
    };
    Assert.Equal(expectedSteps, pipeline.Steps);
}
/// <summary>
/// Loads the pipeline file at <paramref name="path"/>, expands every template
/// reference it contains, and returns the fully-resolved process re-serialized
/// as YAML (with anchors/aliases disabled).
/// </summary>
public String DeserializeAndSerialize(String defaultRoot, String path, IDictionary<String, Object> mustacheContext, CancellationToken cancellationToken)
{
    Int32 fileCount = 0;

    // Load the target file and resolve all template references in place.
    path = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: path);
    PipelineFile<Process> processFile = LoadFile<Process, ProcessConverter>(path, mustacheContext, cancellationToken, ref fileCount);
    Process process = processFile.Object;
    ResolveTemplates(process, defaultRoot: processFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);

    // Serialize the resolved process back to YAML text.
    var builder = new SerializerBuilder();
    builder.DisableAliases();
    builder.WithTypeConverter(new ProcessConverter());
    return builder.Build().Serialize(process);
}
// Comment lines (leading '#', possibly indented) are ignored wherever they appear.
public void can_handle_comments()
{
    var pipeline = PipelineFile.Parse(new[]
    {
        "# first comment",
        "foo:",
        " bar",
        "",
        " # second comment",
        "baz:",
        " qux",
    });

    var expectedSteps = new[]
    {
        new Step("foo", new[] { "bar" }),
        new Step("baz", new[] { "qux" }),
    };
    Assert.Equal(expectedSteps, pipeline.Steps);
}
// Accepts uploaded form files, stores each in the web root under a fresh GUID
// name, runs the processing pipeline on it, and returns the external URLs of
// the produced SVGs along with count/size metadata.
public IActionResult Post()
{
    var dir = Path.Combine(_environment.WebRootPath);
    IFormFileCollection files = HttpContext.Request.Form.Files;

    // Guard clause: no form files at all — report an empty result.
    if (files == null)
    {
        return Ok(new { count = 0, size = 0, filePath = dir });
    }

    long size = files.Sum(f => f.Length);
    var ids = new List<string>();

    foreach (IFormFile file in files)
    {
        // Zero-length uploads are skipped entirely.
        if (file.Length <= 0)
        {
            continue;
        }

        var id = Guid.NewGuid();
        var pipelineFile = new PipelineFile(file);
        pipelineFile.Rename(id.ToString());

        // Persist the upload to disk before handing it to the pipeline.
        var destination = Path.Combine(dir, pipelineFile.FileName);
        using (FileStream target = System.IO.File.Create(destination))
        {
            file.CopyTo(target);
            target.Flush();
        }

        _pipeline.Run(dir, pipelineFile);
        ids.Add(GetExternalFileUrl($"{id}.svg"));
    }

    return Ok(new { count = files.Count, size, filePath = dir, files = ids });
}
// TODO: CHANGE THIS TO PUBLIC WHEN SWITCH RETURN TYPES
/// <summary>
/// Loads and fully resolves a pipeline file into a <c>Process</c>: expands all
/// template references, wraps process-level steps into an implied phase,
/// converts a leading "checkout" step per phase into phase variables, and
/// generates names for unnamed phases.
/// </summary>
/// <exception cref="Exception">When a "checkout" step appears anywhere other than first in a phase.</exception>
internal Process LoadInternal(String defaultRoot, String path, IDictionary<String, Object> mustacheContext, CancellationToken cancellationToken)
{
    Int32 fileCount = 0;

    // Load the target file.
    path = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: path);
    PipelineFile<Process> processFile = LoadFile<Process, ProcessConverter>(path, mustacheContext, cancellationToken, ref fileCount);
    Process process = processFile.Object;
    ResolveTemplates(process, defaultRoot: processFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);

    // Create implied levels for the process: process-level steps (and their
    // sibling settings) move into a single new phase, and the moved properties
    // are cleared on the process so they exist at exactly one level.
    if (process.Steps != null)
    {
        var newPhase = new Phase
        {
            Name = process.Name,
            Condition = process.Condition,
            ContinueOnError = process.ContinueOnError,
            DependsOn = process.DependsOn,
            EnableAccessToken = process.EnableAccessToken,
            Steps = process.Steps,
            Target = process.Target,
            Variables = process.Variables,
        };
        process.Phases = new List<IPhase>();
        process.Phases.Add(newPhase);
        process.Condition = null;
        process.ContinueOnError = null;
        process.DependsOn = null;
        process.EnableAccessToken = null;
        process.Steps = null;
        process.Target = null;
        process.Variables = null;
    }

    // Convert "checkout" steps into variables.
    if (process.Phases != null)
    {
        foreach (Phase phase in process.Phases)
        {
            if (phase.Steps != null && phase.Steps.Count > 0)
            {
                // A leading checkout step is translated into phase variables
                // (derived from the process resources) and removed.
                if (phase.Steps[0] is CheckoutStep)
                {
                    if (phase.Variables == null)
                    {
                        phase.Variables = new List<IVariable>();
                    }

                    foreach (Variable variable in (phase.Steps[0] as CheckoutStep).GetVariables(process.Resources))
                    {
                        phase.Variables.Add(variable);
                    }

                    phase.Steps.RemoveAt(0);
                }

                // Validate "checkout" is only used as the first step within a phase.
                if (phase.Steps.Any(x => x is CheckoutStep))
                {
                    throw new Exception($"Step '{YamlConstants.Checkout}' is currently only supported as the first step within a phase.");
                }
            }
        }
    }

    // Record all known phase names.
    var knownPhaseNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    if (process.Phases != null)
    {
        foreach (Phase phase in process.Phases)
        {
            knownPhaseNames.Add(phase.Name);
        }
    }

    // Generate missing names.
    // NOTE: string.Format renders a null Int32? as "", so the first candidate
    // is "Phase"; on a collision the counter starts at (null ?? 1) + 1 = 2,
    // i.e. the sequence is "Phase", "Phase2", "Phase3", ... — "Phase1" is
    // never generated.
    Int32? nextPhase = null;
    if (process.Phases != null)
    {
        foreach (Phase phase in process.Phases)
        {
            if (String.IsNullOrEmpty(phase.Name))
            {
                String candidateName = String.Format(CultureInfo.InvariantCulture, "Phase{0}", nextPhase);
                while (!knownPhaseNames.Add(candidateName))
                {
                    nextPhase = (nextPhase ?? 1) + 1;
                    candidateName = String.Format(CultureInfo.InvariantCulture, "Phase{0}", nextPhase);
                }

                phase.Name = candidateName;
            }
        }
    }

    m_trace.Verbose("{0}", new TraceObject<Process, ProcessConverter>("After resolution", process));
    return (process);
}
/// <summary>
/// Expands any <c>JobsTemplateReference</c> entries in <paramref name="jobs"/> in place,
/// and recursively resolves template references nested inside non-template jobs.
/// </summary>
/// <param name="jobs">Job list to mutate; may be null (treated as empty).</param>
/// <param name="defaultRoot">Directory used to resolve relative template paths.</param>
/// <param name="cancellationToken">Propagated to template-file loading.</param>
/// <param name="fileCount">Running count of loaded files, used to enforce the max-file limit.</param>
private void ResolveTemplates(List<IJob> jobs, String defaultRoot, CancellationToken cancellationToken, ref Int32 fileCount)
{
    // 'i' is advanced manually: past inserted items after an expansion, or not
    // at all when a reference expands to nothing (the next element shifts into
    // slot i and is examined on the following iteration).
    for (int i = 0; i < (jobs?.Count ?? 0);)
    {
        if (jobs[i] is JobsTemplateReference)
        {
            // Load the template.
            var reference = jobs[i] as JobsTemplateReference;
            String templateFilePath = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: reference.Name);
            PipelineFile<JobsTemplate> templateFile = LoadFile<JobsTemplate, JobsTemplateConverter>(templateFilePath, reference.Parameters, cancellationToken, ref fileCount);
            JobsTemplate template = templateFile.Object;

            // Resolve template references within the template. Relative paths
            // resolve against the template file's own directory.
            if (template.Jobs != null)
            {
                foreach (Job job in template.Jobs)
                {
                    if (job.Variables != null)
                    {
                        ResolveTemplates(job.Variables, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
                    }
                }
            }
            else if (template.Steps != null)
            {
                ResolveTemplates(template.Steps, defaultRoot: templateFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);
            }

            // Merge the template.
            ApplyStepOverrides(reference, template);
            jobs.RemoveAt(i);
            if (template.Jobs != null)
            {
                // InsertRange preserves the declared order of the template's jobs.
                jobs.InsertRange(i, template.Jobs);
                i += template.Jobs.Count;
            }
            else if (template.Steps != null)
            {
                // A steps-only template becomes a single implied job.
                var newJob = new Job { Steps = template.Steps };
                jobs.Insert(i, newJob);
                i++;
            }
        }
        else
        {
            // Resolve nested template references.
            // NOTE(review): assumes every non-reference element is a Job; a
            // different IJob implementation would make the as-cast null and
            // throw below — confirm the deserializer only produces these two.
            var job = jobs[i] as Job;
            if (job.Variables != null)
            {
                ResolveTemplates(job.Variables, defaultRoot, cancellationToken, ref fileCount);
            }

            if (job.Steps != null)
            {
                ResolveTemplates(job.Steps, defaultRoot, cancellationToken, ref fileCount);
            }

            i++;
        }
    }
}
/// <summary>
/// Loads and fully resolves a pipeline file into a <c>Process</c>: expands all
/// template references, wraps process-level jobs/steps into implied phases and
/// jobs, and generates names for unnamed phases and jobs.
/// </summary>
/// <param name="defaultRoot">Directory used to resolve the relative target path.</param>
/// <param name="path">Path of the pipeline file to load.</param>
/// <param name="mustacheContext">Extra mustache replacement context merged over front-matter.</param>
/// <param name="cancellationToken">Propagated to file loading.</param>
/// <returns>The fully-resolved process.</returns>
public Process Load(String defaultRoot, String path, IDictionary<String, Object> mustacheContext, CancellationToken cancellationToken)
{
    Int32 fileCount = 0;

    // Load the target file.
    path = m_fileProvider.ResolvePath(defaultRoot: defaultRoot, path: path);
    PipelineFile<Process> processFile = LoadFile<Process, ProcessConverter>(path, mustacheContext, cancellationToken, ref fileCount);
    Process process = processFile.Object;
    ResolveTemplates(process, defaultRoot: processFile.Directory, cancellationToken: cancellationToken, fileCount: ref fileCount);

    // Create implied levels for the process: process-level jobs become a single
    // phase; process-level steps become a single job inside a single phase.
    if (process.Jobs != null)
    {
        var newPhase = new Phase { Jobs = process.Jobs, Name = process.Name };
        process.Phases = new List<IPhase>();
        process.Phases.Add(newPhase);
        process.Jobs = null;
    }
    else if (process.Steps != null)
    {
        var newJob = new Job { Steps = process.Steps, Name = process.Name, Variables = process.Variables };
        var newPhase = new Phase { Jobs = new List<IJob>() };
        newPhase.Jobs.Add(newJob);
        process.Phases = new List<IPhase>();
        process.Phases.Add(newPhase);
        process.Steps = null;
    }

    var knownPhaseNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    var knownJobNames = new HashSet<String>(StringComparer.OrdinalIgnoreCase);
    if (process.Phases != null)
    {
        foreach (Phase phase in process.Phases)
        {
            // Create implied levels for the phase.
            if (phase.Steps != null)
            {
                var newJob = new Job { Steps = phase.Steps };
                phase.Jobs = new List<IJob>(new IJob[] { newJob });
                phase.Steps = null;
            }

            // Record all known phase/job names.
            knownPhaseNames.Add(phase.Name);
            if (phase.Jobs != null)
            {
                foreach (Job job in phase.Jobs)
                {
                    knownJobNames.Add(job.Name);
                }
            }
        }
    }

    // Generate missing names. string.Format renders a null Int32? as "", so the
    // first candidates are "Phase"/"Build"; on collision the counter starts at 2.
    Int32? nextPhase = null;
    Int32? nextJob = null;
    if (process.Phases != null)
    {
        foreach (Phase phase in process.Phases)
        {
            if (String.IsNullOrEmpty(phase.Name))
            {
                String candidateName = String.Format(CultureInfo.InvariantCulture, "Phase{0}", nextPhase);
                while (!knownPhaseNames.Add(candidateName))
                {
                    nextPhase = (nextPhase ?? 1) + 1;
                    candidateName = String.Format(CultureInfo.InvariantCulture, "Phase{0}", nextPhase);
                }

                phase.Name = candidateName;
            }

            if (phase.Jobs != null)
            {
                foreach (Job job in phase.Jobs)
                {
                    if (String.IsNullOrEmpty(job.Name))
                    {
                        String candidateName = String.Format(CultureInfo.InvariantCulture, "Build{0}", nextJob);

                        // Bug fix: generated job names must be deduplicated against
                        // the set of known JOB names. The original tested
                        // knownPhaseNames here, leaving knownJobNames populated but
                        // never consulted, so a generated name could collide with
                        // an existing job name.
                        while (!knownJobNames.Add(candidateName))
                        {
                            nextJob = (nextJob ?? 1) + 1;
                            candidateName = String.Format(CultureInfo.InvariantCulture, "Build{0}", nextJob);
                        }

                        job.Name = candidateName;
                    }
                }
            }
        }
    }

    m_trace.Verbose("{0}", new TraceObject<Process, ProcessConverter>("After resolution", process));
    return (process);
}
/// <summary>
/// Loads a single YAML pipeline file: enforces the max-file limit, strips an
/// optional "---"-delimited YAML front-matter section, evaluates the remaining
/// content as a mustache template (front-matter merged with the caller-supplied
/// context), and deserializes the result with <typeparamref name="TConverter"/>.
/// </summary>
/// <exception cref="FormatException">When the file limit is exceeded, front-matter is unclosed, or front-matter is invalid YAML.</exception>
/// <exception cref="System.OperationCanceledException">When mustache evaluation exceeds the configured timeout.</exception>
private PipelineFile<TObject> LoadFile<TObject, TConverter>(String path, IDictionary<String, Object> mustacheContext, CancellationToken cancellationToken, ref Int32 fileCount)
    where TConverter : IYamlTypeConverter, new()
{
    // Enforce the configured cap on how many files one pipeline may pull in
    // (guards against runaway/circular template references).
    fileCount++;
    if (m_options.MaxFiles > 0 && fileCount > m_options.MaxFiles)
    {
        throw new FormatException(TaskResources.YamlFileCount(m_options.MaxFiles));
    }

    cancellationToken.ThrowIfCancellationRequested();
    FileData file = m_fileProvider.GetFile(path);
    String mustacheReplaced;
    StringReader reader = null;
    CancellationTokenSource mustacheCancellationTokenSource = null;
    try
    {
        // Read front-matter
        IDictionary<String, Object> frontMatter = null;
        reader = new StringReader(file.Content);
        String line = reader.ReadLine();
        if (!String.Equals(line, "---", StringComparison.Ordinal))
        {
            // No front-matter. Reset the reader.
            reader.Dispose();
            reader = new StringReader(file.Content);
        }
        else
        {
            // Deserialize front-matter: collect lines until the closing "---".
            cancellationToken.ThrowIfCancellationRequested();
            StringBuilder frontMatterBuilder = new StringBuilder();
            while (true)
            {
                line = reader.ReadLine();
                if (line == null)
                {
                    // EOF before the closing delimiter.
                    throw new FormatException(TaskResources.YamlFrontMatterNotClosed(path));
                }
                else if (String.Equals(line, "---", StringComparison.Ordinal))
                {
                    break;
                }
                else
                {
                    frontMatterBuilder.AppendLine(line);
                }
            }

            var frontMatterDeserializer = new Deserializer();
            try
            {
                frontMatter = frontMatterDeserializer.Deserialize<IDictionary<String, Object>>(frontMatterBuilder.ToString());
            }
            catch (Exception ex)
            {
                throw new FormatException(TaskResources.YamlFrontMatterNotValid(path, ex.Message), ex);
            }
        }

        // Merge the mustache replace context. Caller-supplied context wins over
        // values with the same key in the front-matter.
        frontMatter = frontMatter ?? new Dictionary<String, Object>();
        if (mustacheContext != null)
        {
            foreach (KeyValuePair<String, Object> pair in mustacheContext)
            {
                frontMatter[pair.Key] = pair.Value;
            }
        }

        // Prepare the mustache options.
        mustacheCancellationTokenSource = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
        // var mustacheOptions = new MustacheEvaluationOptions
        // {
        //     CancellationToken = mustacheCancellationTokenSource.Token,
        //     EncodeMethod = MustacheEncodeMethods.JsonEncode,
        //     MaxResultLength = m_options.MustacheEvaluationMaxResultLength,
        // };

        // Parse the mustache template. The reader is now positioned past any
        // front-matter, so only the document body is parsed.
        cancellationToken.ThrowIfCancellationRequested();
        var mustacheParser = new MustacheTemplateParser(useDefaultHandlebarHelpers: true, useCommonTemplateHelpers: true);
        MustacheExpression mustacheExpression = mustacheParser.Parse(template: reader.ReadToEnd());

        // Limit the mustache evaluation time.
        if (m_options.MustacheEvaluationTimeout > TimeSpan.Zero)
        {
            mustacheCancellationTokenSource.CancelAfter(m_options.MustacheEvaluationTimeout);
        }

        try
        {
            // Perform the mustache evaluation.
            mustacheReplaced = mustacheExpression.Evaluate(
                replacementObject: frontMatter,
                additionalEvaluationData: null,
                parentContext: null,
                partialExpressions: null
                //options: mustacheOptions
                );
        }
        // Only translate cancellations caused by the evaluation timeout; a
        // caller-initiated cancellation propagates unchanged.
        catch (System.OperationCanceledException ex) when (mustacheCancellationTokenSource.IsCancellationRequested && !cancellationToken.IsCancellationRequested)
        {
            throw new System.OperationCanceledException(TaskResources.MustacheEvaluationTimeout(path, m_options.MustacheEvaluationTimeout.TotalSeconds), ex);
        }

        m_trace.Verbose("{0}", new TraceFileContent($"{file.Name} after mustache replacement", mustacheReplaced));
    }
    finally
    {
        reader?.Dispose();
        reader = null;
        mustacheCancellationTokenSource?.Dispose();
        mustacheCancellationTokenSource = null;
    }

    // Deserialize
    DeserializerBuilder deserializerBuilder = new DeserializerBuilder();
    deserializerBuilder.WithTypeConverter(new TConverter());
    Deserializer deserializer = deserializerBuilder.Build();
    TObject obj = deserializer.Deserialize<TObject>(mustacheReplaced);
    m_trace.Verbose("{0}", new TraceObject<TObject, TConverter>($"{file.Name} after deserialization ", obj));
    var result = new PipelineFile<TObject>
    {
        Name = file.Name,
        Directory = file.Directory,
        Object = obj
    };
    return (result);
}