/// <summary>
/// Validate that the CSDL metadata defined for a service matches the documentation.
/// </summary>
/// <param name="options">Options identifying the metadata source and the documentation set to validate against.</param>
/// <returns>True when every generated resource validated without failures; otherwise false.</returns>
private static async Task<bool> CheckServiceMetadataAsync(CheckMetadataOptions options)
{
    List<Schema> schemas = await TryGetMetadataSchemasAsync(options);
    if (schemas == null)
    {
        return false;
    }

    FancyConsole.WriteLine(
        FancyConsole.ConsoleSuccessColor,
        " found {0} schema definitions: {1}",
        schemas.Count,
        schemas.Select(s => s.Namespace).ComponentsJoinedByString(", "));

    var docSet = await GetDocSetAsync(options);
    if (docSet == null)
    {
        return false;
    }

    const string testname = "validate-service-metadata";
    TestReport.StartTest(testname);

    // Convert the CSDL schemas into resource definitions we can check against the docs.
    List<ResourceDefinition> foundResources = ODataParser.GenerateResourcesFromSchemas(schemas);

    CheckResults results = new CheckResults();
    List<ValidationError> collectedErrors = new List<ValidationError>();

    foreach (var resource in foundResources)
    {
        FancyConsole.WriteLine();
        FancyConsole.Write(FancyConsole.ConsoleHeaderColor, "Checking resource: {0}...", resource.Metadata.ResourceType);

        FancyConsole.VerboseWriteLine();
        FancyConsole.VerboseWriteLine(resource.JsonExample);
        FancyConsole.VerboseWriteLine();

        // Verify that this resource matches the documentation
        ValidationError[] errors;
        docSet.ResourceCollection.ValidateJsonExample(
            resource.Metadata,
            resource.JsonExample,
            out errors,
            new ValidationOptions { RelaxedStringValidation = true });

        results.IncrementResultCount(errors);
        collectedErrors.AddRange(errors);

        await WriteOutErrorsAndFinishTestAsync(errors, options.SilenceWarnings, successMessage: " no errors.");
    }

    if (options.IgnoreWarnings)
    {
        results.ConvertWarningsToSuccess();
    }

    // Aggregate every collected error message into the test report's stdout.
    string output = collectedErrors.Select(e => e.ErrorText).ComponentsJoinedByString("\r\n");
    TestOutcome outcome = results.WereFailures ? TestOutcome.Failed : TestOutcome.Passed;
    await TestReport.FinishTestAsync(testname, outcome, stdOut: output);

    results.PrintToConsole();
    return !results.WereFailures;
}
/// <summary>
/// Execute the provided methods on the given account.
/// </summary>
/// <param name="options">Service-check options controlling parallelism, pauses, and warning handling.</param>
/// <param name="account">The service account to authenticate with and run requests against.</param>
/// <param name="methods">The methods to validate against the live service.</param>
/// <param name="docset">Documentation set that supplies the test scenarios for each method.</param>
/// <returns>True if the methods all passed, false if there were failures.</returns>
private static async Task<bool> CheckMethodsForAccountAsync(CheckServiceOptions options, IServiceAccount account, MethodDefinition[] methods, DocSet docset)
{
    ConfigureAdditionalHeadersForAccount(options, account);

    FancyConsole.WriteLine(FancyConsole.ConsoleHeaderColor, "Testing with account: {0}", account.Name);
    // Fixed: the format string has no placeholder, so the stray account.Name argument was removed.
    FancyConsole.WriteLine(FancyConsole.ConsoleCodeColor, "Preparing authentication for requests...");

    try
    {
        await account.PrepareForRequestAsync();
    }
    catch (Exception ex)
    {
        // Authentication failed; record the error and bail out rather than running any requests.
        RecordError(ex.Message);
        return false;
    }

    AuthenicationCredentials credentials = account.CreateCredentials();

    int concurrentTasks = options.ParallelTests ? ParallelTaskCount : 1;

    CheckResults docSetResults = new CheckResults();
    await ForEachAsync(methods, concurrentTasks, async method =>
    {
        FancyConsole.WriteLine(
            FancyConsole.ConsoleCodeColor,
            "Running validation for method: {0}",
            method.Identifier);

        ScenarioDefinition[] scenarios = docset.TestScenarios.ScenariosForMethod(method);
        ValidationResults results = await method.ValidateServiceResponseAsync(scenarios, account, credentials);

        PrintResultsToConsole(method, account, results, options);
        await TestReport.LogMethodTestResults(method, account, results);
        docSetResults.RecordResults(results, options);

        if (concurrentTasks == 1)
        {
            // Only pause between methods when running sequentially.
            AddPause(options);
        }
    });

    if (options.IgnoreWarnings || options.SilenceWarnings)
    {
        // Remove the warning flag from the outcomes
        docSetResults.ConvertWarningsToSuccess();
    }

    docSetResults.PrintToConsole();

    bool hadWarnings = docSetResults.WarningCount > 0;
    bool hadErrors = docSetResults.FailureCount > 0;
    // Use logical || (was bitwise |): both warnings and errors count as failure.
    return !(hadErrors || hadWarnings);
}