/// <summary>Assert that <see cref="ChangeDescriptor.Parse"/> extracts the expected add/remove/replace entries and parse errors from a raw descriptor string.</summary>
public void Parse_SetsExpectedValues_Raw()
{
    // arrange
    string rawDescriptor = "-Nexus:2400, -B, XX → YY, Nexus:451,+A, XXX → YYY, invalidA →, → invalidB";
    string[] expectedAdd = new[] { "Nexus:451", "A" };
    string[] expectedRemove = new[] { "Nexus:2400", "B" };
    IDictionary<string, string> expectedReplace = new Dictionary<string, string>
    {
        ["XX"] = "YY",
        ["XXX"] = "YYY"
    };
    string[] expectedErrors = new[]
    {
        "Failed parsing ' invalidA →': can't map to a blank value. Use the '-value' format to remove a value.",
        "Failed parsing ' → invalidB': can't map from a blank old value. Use the '+value' format to add a value."
    };

    // act
    ChangeDescriptor parsed = ChangeDescriptor.Parse(rawDescriptor, out string[] errors);

    // assert
    Assert.That(parsed.Add, Is.EquivalentTo(expectedAdd), $"{nameof(parsed.Add)} doesn't match the expected value.");
    Assert.That(parsed.Remove, Is.EquivalentTo(expectedRemove), $"{nameof(parsed.Remove)} doesn't match the expected value."); // fixed: failure message previously referenced Replace
    Assert.That(parsed.Replace, Is.EquivalentTo(expectedReplace), $"{nameof(parsed.Replace)} doesn't match the expected value.");
    Assert.That(errors, Is.EquivalentTo(expectedErrors), $"{nameof(errors)} doesn't match the expected value.");
}
/// <summary>Assert that <see cref="ChangeDescriptor.Parse"/> applies the <c>formatValue</c> callback (here: semantic-version normalization) and extracts the expected add/remove/replace entries and parse errors.</summary>
public void Parse_SetsExpectedValues_Formatted()
{
    // arrange
    string rawDescriptor = "-1.0.1, -2.0-beta, 1.00 → 1.0, 1.0.0,+2.0-beta.15, 2.0 → 2.0-beta, invalidA →, → invalidB";
    string[] expectedAdd = new[] { "1.0.0", "2.0.0-beta.15" };
    string[] expectedRemove = new[] { "1.0.1", "2.0.0-beta" };
    IDictionary<string, string> expectedReplace = new Dictionary<string, string>
    {
        ["1.00"] = "1.0.0",
        ["2.0.0"] = "2.0.0-beta"
    };
    string[] expectedErrors = new[]
    {
        "Failed parsing ' invalidA →': can't map to a blank value. Use the '-value' format to remove a value.",
        "Failed parsing ' → invalidB': can't map from a blank old value. Use the '+value' format to add a value."
    };

    // act
    ChangeDescriptor parsed = ChangeDescriptor.Parse(
        rawDescriptor,
        out string[] errors,
        formatValue: raw => SemanticVersion.TryParse(raw, out ISemanticVersion version)
            ? version.ToString()
            : raw
    );

    // assert
    Assert.That(parsed.Add, Is.EquivalentTo(expectedAdd), $"{nameof(parsed.Add)} doesn't match the expected value.");
    Assert.That(parsed.Remove, Is.EquivalentTo(expectedRemove), $"{nameof(parsed.Remove)} doesn't match the expected value."); // fixed: failure message previously referenced Replace
    Assert.That(parsed.Replace, Is.EquivalentTo(expectedReplace), $"{nameof(parsed.Replace)} doesn't match the expected value.");
    Assert.That(errors, Is.EquivalentTo(expectedErrors), $"{nameof(errors)} doesn't match the expected value.");
}
/// <summary>Parse a raw change descriptor (failing the test if parsing produced errors) and apply it to the given input value.</summary>
/// <param name="input">The value to transform.</param>
/// <param name="descriptor">The raw change descriptor to parse.</param>
public string Apply_Raw(string input, string descriptor)
{
    ChangeDescriptor parsed = ChangeDescriptor.Parse(descriptor, out string[] errors);
    Assert.IsEmpty(errors, "Parsing the descriptor failed.");

    return parsed.ApplyToCopy(input);
}
/// <summary>Parse a raw change descriptor (failing the test if parsing produced errors) and return its string representation.</summary>
/// <param name="descriptor">The raw change descriptor to parse.</param>
public string ToString(string descriptor)
{
    ChangeDescriptor parsed = ChangeDescriptor.Parse(descriptor, out string[] errors);
    Assert.IsEmpty(errors, "Parsing the descriptor failed.");

    return parsed.ToString();
}
/// <summary>Get a semantic local version for update checks.</summary>
/// <param name="version">The version to parse.</param>
/// <param name="map">Changes to apply to the raw version, if any.</param>
/// <param name="allowNonStandard">Whether to allow non-standard versions.</param>
/// <returns>The mapped version if it parses; else the original version if it parses; else <c>null</c>.</returns>
public ISemanticVersion GetMappedVersion(string version, ChangeDescriptor map, bool allowNonStandard)
{
    // prefer the mapped version when it parses
    string mappedRaw = this.GetRawMappedVersion(version, map, allowNonStandard);
    if (SemanticVersion.TryParse(mappedRaw, allowNonStandard, out ISemanticVersion mapped))
        return mapped;

    // else fall back to the unmapped version (or null if that doesn't parse either)
    return SemanticVersion.TryParse(version, allowNonStandard, out ISemanticVersion original)
        ? original
        : null;
}
/// <summary>Get a semantic local version for update checks.</summary>
/// <param name="version">The version to map.</param>
/// <param name="map">Changes to apply to the raw version, if any.</param>
/// <param name="allowNonStandard">Whether to allow non-standard versions.</param>
/// <returns>The raw version after applying the change descriptor, or the input unchanged when there's nothing to map.</returns>
private string GetRawMappedVersion(string version, ChangeDescriptor map, bool allowNonStandard)
{
    // nothing to map
    if (version is null || map?.HasChanges != true)
        return version;

    // apply the descriptor to a single-entry list and take whatever survives
    List<string> values = new List<string> { version };
    map.Apply(values);
    return values.FirstOrDefault();
}
/// <summary>Build a sample tracked use case describing an update of two firm entities (IDs 12 and 13).</summary>
public static TrackedUseCase UpdateFirms()
{
    var identity = new StrictOperationIdentity(UpdateIdentity.Instance, new EntitySet(EntityTypeFirm.Instance));
    var entityChanges = new[]
    {
        ChangeDescriptor.Create(EntityTypeFirm.Instance, 12, ChangeKind.Updated),
        ChangeDescriptor.Create(EntityTypeFirm.Instance, 13, ChangeKind.Updated)
    };
    var operation = new OperationDescriptor(
        Guid.NewGuid(),
        identity,
        new OperationContext(DateTimeOffset.UtcNow, DateTime.UtcNow),
        new EntityChangesContext(entityChanges));
    var useCaseContext = new UseCaseContext(DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, 0);

    // the single operation is also the use case's root operation
    return new TrackedUseCase(useCaseContext, operation.Id, new[] { operation }, new Dictionary<Guid, HashSet<Guid>>());
}
/// <summary>Merge a routine into the cached routine list, replacing any existing equal entry, and report what changed (if anything) via <paramref name="changeDescriptor"/>.</summary>
/// <param name="maxRowDate">NOTE(review): not used anywhere in this method — confirm whether it's vestigial or consumed by an overload elsewhere.</param>
/// <param name="newCachedRoutine">The incoming routine state to merge into the cache.</param>
/// <param name="lastUpdateByHostName">The host name recorded as the source of the change.</param>
/// <param name="changeDescriptor">Set to a descriptor of the change (ADDED/DROPPED/(RE)ADDED/UPDATED), or left null when nothing changed.</param>
public void AddToCache(long maxRowDate, CachedRoutine newCachedRoutine, string lastUpdateByHostName, out ChangeDescriptor changeDescriptor)
{
    changeDescriptor = null;

    // lazily initialise identity and backing list
    if (this.Id == null) { this.Id = ShortId.Generate(); }
    if (this.CachedRoutineList == null) { this.CachedRoutineList = new List<CachedRoutine>(); }

    lock (CachedRoutineList)
    {
        // look for an existing item
        var existing = this.CachedRoutineList.Where(e => newCachedRoutine.Equals(e)).FirstOrDefault();

        if (existing != null)
        {
            // if existing is not deleted but the update IS
            if (!existing.IsDeleted && newCachedRoutine.IsDeleted)
            {
                changeDescriptor = ChangeDescriptor.Create(lastUpdateByHostName, $"{newCachedRoutine.FullName} DROPPED");
                this.CachedRoutineList.Remove(existing); // will be added again below
            }
            else if (existing.IsDeleted && newCachedRoutine.IsDeleted) // still deleted then nothing to do
            {
                return;
            }
            else if (existing.IsDeleted && !newCachedRoutine.IsDeleted)
            {
                // "undeleted"
                changeDescriptor = ChangeDescriptor.Create(lastUpdateByHostName, $"{newCachedRoutine.FullName} (RE)ADDED");
                this.CachedRoutineList.Remove(existing); // will be added again below
            }
            else if (!newCachedRoutine.IsDeleted)
            {
                // both alive: detect metadata-level differences via the precomputed hashes
                bool parametersUpdated = newCachedRoutine.ParametersHash != existing.ParametersHash;
                bool resultSetsUpdated = newCachedRoutine.ResultSetHash != existing.ResultSetHash;

                // jsDAL metadata changed if exactly one side has it, or both have it but they differ
                bool jsDALMetadataUpdated = false;
                if (existing.jsDALMetadata == null && newCachedRoutine.jsDALMetadata != null)
                {
                    jsDALMetadataUpdated = true;
                }
                else if (existing.jsDALMetadata != null && newCachedRoutine.jsDALMetadata == null)
                {
                    jsDALMetadataUpdated = true;
                }
                else if (newCachedRoutine.jsDALMetadata != null)
                {
                    var newMatchesExisting = newCachedRoutine.jsDALMetadata.Equals(existing.jsDALMetadata);
                    // NOTE(review): the extra null check here is redundant — this branch already guarantees non-null
                    jsDALMetadataUpdated = newCachedRoutine.jsDALMetadata != null && !newMatchesExisting;
                }

                // no metadata related change
                if (!parametersUpdated && !resultSetsUpdated && !jsDALMetadataUpdated)
                {
                    return;
                }

                this.CachedRoutineList.Remove(existing); // will be added again below

                // build a human-readable summary of which aspects changed
                var applicableChanges = new List<string>();
                if (parametersUpdated) { applicableChanges.Add("PARAMETERS"); }
                if (resultSetsUpdated) { applicableChanges.Add("RESULT SETS"); }
                if (jsDALMetadataUpdated) { applicableChanges.Add("jsDAL metadata"); }

                changeDescriptor = ChangeDescriptor.Create(lastUpdateByHostName, $"{newCachedRoutine.FullName} UPDATED {string.Join(", ", applicableChanges.ToArray())}");
            }
        }
        else
        {
            // brand new routine
            changeDescriptor = ChangeDescriptor.Create(lastUpdateByHostName, $"{newCachedRoutine.FullName} ADDED");
        }

        // every non-early-return path re-adds the (possibly replaced) routine
        this.CachedRoutineList.Add(newCachedRoutine);
    }
}
/// <summary>Get the mod info for an update key.</summary>
/// <param name="updateKey">The namespaced update key.</param>
/// <param name="allowNonStandardVersions">Whether to allow non-standard versions.</param>
/// <param name="mapRemoteVersions">The changes to apply to remote versions for update checks.</param>
private async Task<ModInfoModel> GetInfoForUpdateKeyAsync(UpdateKey updateKey, bool allowNonStandardVersions, ChangeDescriptor mapRemoteVersions)
{
    // reuse the cached mod page when it exists and isn't stale (errors expire faster than successes)
    IModPage page = null;
    bool fromCache = false;
    if (this.ModCache.TryGetMod(updateKey.Site, updateKey.ID, out Cached<IModPage> cachedMod))
    {
        var cacheMinutes = cachedMod.Data.Status == RemoteModStatus.TemporaryError
            ? this.Config.Value.ErrorCacheMinutes
            : this.Config.Value.SuccessCacheMinutes;
        if (!this.ModCache.IsStale(cachedMod.LastUpdated, cacheMinutes))
        {
            page = cachedMod.Data;
            fromCache = true;
        }
    }

    // otherwise fetch the page fresh and cache it
    if (!fromCache)
    {
        page = await this.ModSites.GetModPageAsync(updateKey);
        this.ModCache.SaveMod(updateKey.Site, updateKey.ID, page);
    }

    // extract version info from the page
    return this.ModSites.GetPageVersions(page, updateKey.Subkey, allowNonStandardVersions, mapRemoteVersions);
}
/// <summary>Parse version info for the given mod page info.</summary>
/// <param name="page">The mod page info.</param>
/// <param name="subkey">The optional update subkey to match in available files. (If no file names or descriptions contain the subkey, it'll be ignored.)</param>
/// <param name="allowNonStandardVersions">Whether to allow non-standard versions.</param>
/// <param name="mapRemoteVersions">The changes to apply to remote versions for update checks.</param>
public ModInfoModel GetPageVersions(IModPage page, string subkey, bool allowNonStandardVersions, ChangeDescriptor mapRemoteVersions)
{
    // build base model; a non-OK page short-circuits with its own error
    ModInfoModel model = new ModInfoModel()
        .SetBasicInfo(page.Name, page.Url)
        .SetError(page.Status, page.Error);
    if (page.Status != RemoteModStatus.Ok)
        return model;

    // fetch versions matching the subkey first, then ignoring the subkey as a fallback
    bool found = this.TryGetLatestVersions(page, subkey, allowNonStandardVersions, mapRemoteVersions, out ISemanticVersion mainVersion, out ISemanticVersion previewVersion);
    if (!found && subkey != null)
        found = this.TryGetLatestVersions(page, null, allowNonStandardVersions, mapRemoteVersions, out mainVersion, out previewVersion);

    // return info (or an invalid-data error when no version parsed)
    return found
        ? model.SetVersions(mainVersion, previewVersion)
        : model.SetError(RemoteModStatus.InvalidData, $"The {page.Site} mod with ID '{page.Id}' has no valid versions.");
}
/*********
** Private methods
*********/
/// <summary>Get the mod version numbers for the given mod.</summary>
/// <param name="mod">The mod to check.</param>
/// <param name="subkey">The optional update subkey to match in available files. (If no file names or descriptions contain the subkey, it'll be ignored.)</param>
/// <param name="allowNonStandardVersions">Whether to allow non-standard versions.</param>
/// <param name="mapRemoteVersions">The changes to apply to remote versions for update checks.</param>
/// <param name="main">The main mod version.</param>
/// <param name="preview">The latest prerelease version, if newer than <paramref name="main"/>.</param>
/// <returns>Whether a main version was found.</returns>
private bool TryGetLatestVersions(IModPage mod, string subkey, bool allowNonStandardVersions, ChangeDescriptor mapRemoteVersions, out ISemanticVersion main, out ISemanticVersion preview)
{
    main = null;
    preview = null;

    // parse all versions from the mod page
    IEnumerable<(string name, string description, ISemanticVersion version)> GetAllVersions()
    {
        if (mod != null)
        {
            // normalize and map a raw version string (null when unparseable)
            ISemanticVersion ParseAndMapVersion(string raw)
            {
                raw = this.NormalizeVersion(raw);
                return this.GetMappedVersion(raw, mapRemoteVersions, allowNonStandardVersions);
            }

            // get mod version
            ISemanticVersion modVersion = ParseAndMapVersion(mod.Version);
            if (modVersion != null)
                yield return (name: null, description: null, version: modVersion); // fixed: previously re-parsed mod.Version instead of reusing modVersion

            // get file versions
            foreach (IModDownload download in mod.Downloads)
            {
                ISemanticVersion cur = ParseAndMapVersion(download.Version);
                if (cur != null)
                    yield return (download.Name, download.Description, cur);
            }
        }
    }
    var versions = GetAllVersions()
        .OrderByDescending(p => p.version, SemanticVersionComparer.Instance)
        .ToArray();

    // get main + preview versions from the sorted list, optionally filtered
    void TryGetVersions(out ISemanticVersion mainVersion, out ISemanticVersion previewVersion, Func<(string name, string description, ISemanticVersion version), bool> filter = null)
    {
        mainVersion = null;
        previewVersion = null;

        // get latest main + preview version
        foreach (var entry in versions)
        {
            if (filter?.Invoke(entry) == false)
                continue;

            if (entry.version.IsPrerelease())
                previewVersion ??= entry.version;
            else
                mainVersion ??= entry.version;

            if (mainVersion != null)
                break; // any other values will be older
        }

        // normalize values
        if (previewVersion is not null)
        {
            mainVersion ??= previewVersion; // if every version is prerelease, latest one is the main version
            if (!previewVersion.IsNewerThan(mainVersion))
                previewVersion = null;
        }
    }

    // prefer versions whose file name/description matches the subkey, then fall back to any version
    if (subkey is not null)
        TryGetVersions(out main, out preview, entry => entry.name?.Contains(subkey, StringComparison.OrdinalIgnoreCase) == true || entry.description?.Contains(subkey, StringComparison.OrdinalIgnoreCase) == true);
    if (main is null)
        TryGetVersions(out main, out preview);

    return main != null;
}
/// <summary>Regenerate a JsFile's JavaScript output, minified output and TypeScript typings for an endpoint, record the change set against the file's new version, and write the three output files to disk.</summary>
/// <param name="source">Free-text label of what triggered the generation; only used in the summary log line.</param>
/// <param name="endpoint">The endpoint whose cached routines and templates drive generation.</param>
/// <param name="jsFile">The output file descriptor; its Version and ETags are mutated here.</param>
/// <param name="fullChangeSet">Optional routine-name → change map; when supplied and no change touches a routine included in this file, generation is skipped entirely.</param>
/// <param name="rulesChanged">Whether a rule change should be recorded against the new file version.</param>
public static void GenerateJsFileV2(string source, Endpoint endpoint, JsFile jsFile, Dictionary<string, ChangeDescriptor> fullChangeSet = null, bool rulesChanged = false)
{
    var generateMetric = new Performance.ExecutionBase("GenerateJsFileV2");
    var noChanges = false;
    try
    {
        // TODO: Figure out out casing on this property
        string jsNamespace = null;//endpoint.JsNamespace;
        if (string.IsNullOrWhiteSpace(jsNamespace)) { jsNamespace = endpoint.MetadataConnection.InitialCatalog; }

        var jsSafeNamespace = MakeNameJsSafe(jsNamespace);

        var routineContainerTemplate = WorkSpawner.TEMPLATE_RoutineContainer;
        var routineTemplate = WorkSpawner.TEMPLATE_Routine;
        var typescriptDefinitionsContainer = WorkSpawner.TEMPLATE_TypescriptDefinitions;

        endpoint.ApplyRules(jsFile);

        // NOTE(review): due to operator precedence this parses as `?.Included ?? (false == true)`, i.e. `?.Included ?? false` —
        // the result is correct but the `== true` is dead; worth simplifying.
        var includedRoutines = (from row in endpoint.CachedRoutines
                                where !row.IsDeleted && (row.RuleInstructions[jsFile]?.Included ?? false == true)
                                orderby row.FullName
                                select row).ToList();

        List<KeyValuePair<string, ChangeDescriptor>> changesInFile = null;

        // restrict the full change set to changes affecting routines included in this file
        if (fullChangeSet != null)
        {
            changesInFile = fullChangeSet.Where(change => includedRoutines.Count(inc => inc.FullName.Equals(change.Key, StringComparison.OrdinalIgnoreCase)) > 0).ToList();

            // TODO: Consider if this is a good idea?
            // If we can reasonably say that there are no changes to routines that this JsFile cares about, why regenerate this file and why give it a new Version
            if (changesInFile.Count == 0)
            {
                noChanges = true;
                return;
            }
        }

        // schema name → generated routine definition lines (for .js and .d.ts respectively)
        var jsSchemaLookupForJsFunctions = new Dictionary<string, List<string> /*Routine defs*/>();
        var tsSchemaLookup = new Dictionary<string, List<string> /*Routine defs*/>();

        var typeScriptParameterAndResultTypesSB = new StringBuilder();

        // NOTE(review): this deferred query is never enumerated in this method — confirm whether it's vestigial.
        var serverMethodPlugins = PluginLoader.Instance.PluginAssemblies
            .SelectMany(pa => pa.Plugins)
            .Where(p => p.Type == PluginType.ServerMethod && endpoint.Application.IsPluginIncluded(p.Guid.ToString()));

        var uniqueSchemas = new List<string>();

        var mainLoopMetric = generateMetric.BeginChildStage("Main loop");

        // emit the per-routine .js definition line and .tsd stubs
        includedRoutines.ForEach(r =>
        {
            try
            {
                if (r.TypescriptMethodStub == null) { r.PrecalculateJsGenerationValues(endpoint); }

                var jsSchemaName = JsFileGenerator.MakeNameJsSafe(r.Schema);
                var jsFunctionName = JsFileGenerator.MakeNameJsSafe(r.Routine);

                if (!jsSchemaLookupForJsFunctions.ContainsKey(jsSchemaName)) { jsSchemaLookupForJsFunctions.Add(jsSchemaName, new List<string>()); }
                if (!tsSchemaLookup.ContainsKey(jsSchemaName)) { tsSchemaLookup.Add(jsSchemaName, new List<string>()); }
                if (!uniqueSchemas.Contains(r.Schema)) { uniqueSchemas.Add(r.Schema); }

                // routines reference their schema by index into uniqueSchemas
                var schemaIx = uniqueSchemas.IndexOf(r.Schema);

                // .js
                {
                    var jsFunctionDefLine = routineTemplate.Replace("<<FUNC_NAME>>", jsFunctionName).Replace("<<SCHEMA_IX>>", schemaIx.ToString()).Replace("<<ROUTINE>>", r.Routine);

                    // "S" for stored procedures, "U" for everything else (presumably UDFs) — TODO confirm
                    if (r.Type.Equals(string.Intern("PROCEDURE"), StringComparison.OrdinalIgnoreCase)) { jsFunctionDefLine = jsFunctionDefLine.Replace("<<CLASS>>", "S"); }
                    else { jsFunctionDefLine = jsFunctionDefLine.Replace("<<CLASS>>", "U"); }

                    jsSchemaLookupForJsFunctions[jsSchemaName].Add(jsFunctionDefLine);
                }

                // .tsd
                {
                    typeScriptParameterAndResultTypesSB.AppendLine(r.TypescriptParameterTypeDefinition);
                    typeScriptParameterAndResultTypesSB.AppendLine(r.TypescriptOutputParameterTypeDefinition);
                    typeScriptParameterAndResultTypesSB.AppendLine(r.TypescriptResultSetDefinitions);

                    tsSchemaLookup[jsSchemaName].Add(r.TypescriptMethodStub);
                }
            }
            catch (Exception ex)
            {
                // a single failing routine is logged and skipped; the rest of the file still generates
                SessionLog.Exception(ex);
                // TODO: quit whole process
            }
        });

        mainLoopMetric.End();

        var finalSBMetric = generateMetric.BeginChildStage("Final SB");

        // flatten the schema lookups into the final .js / .d.ts body text
        var schemaAndRoutineDefs = string.Join("\r\n", jsSchemaLookupForJsFunctions.Select(s => "\tx." + s.Key + " = {\r\n\t\t" + string.Join(",\r\n\t\t", s.Value.ToArray()) + "\r\n\t}\r\n").ToArray());
        var tsSchemaAndRoutineDefs = string.Join("\r\n", tsSchemaLookup.Select(s => "\t\tclass " + s.Key + " {\r\n" + string.Join(";\r\n", s.Value.ToArray()) + "\r\n\t\t}\r\n").ToArray());

        var finalSB = new StringBuilder(routineContainerTemplate);

        jsFile.IncrementVersion();

        // record changes against new version
        if (changesInFile != null && changesInFile.Count > 0)
        {
            JsFileChangesTracker.Instance.AddUpdate(endpoint, jsFile, changesInFile.Select(kv => kv.Value).ToList());
        }

        if (rulesChanged)
        {
            JsFileChangesTracker.Instance.AddUpdate(endpoint, jsFile, new List<ChangeDescriptor> { ChangeDescriptor.Create("System", "One or more rules changed.") });
        }

        // substitute the placeholders in the .js container template
        finalSB.Replace("<<DATE>>", DateTime.Now.ToString("dd MMM yyyy, HH:mm"))
            .Replace("<<FILE_VERSION>>", jsFile.Version.ToString())
            .Replace("<<SERVER_NAME>>", Environment.MachineName)
            .Replace("<<ENDPOINT>>", endpoint.Pedigree)
            .Replace("<<UNIQUE_SCHEMAS>>", string.Join(',', uniqueSchemas.Select(k => $"'{k}'")))
            .Replace("<<Catalog>>", jsSafeNamespace)
            .Replace("<<ROUTINES>>", schemaAndRoutineDefs)
            ;

        var finalTypeScriptSB = new StringBuilder();

        finalTypeScriptSB = finalTypeScriptSB.Append(typescriptDefinitionsContainer);

        // Custom/User types
        if (endpoint.CustomTypeLookupWithTypeScriptDef.Count > 0)
        {
            var customTSD = from kv in endpoint.CustomTypeLookupWithTypeScriptDef select $"\t\ttype {kv.Key} = {kv.Value};";

            // custom types go before the generated parameter/result types
            typeScriptParameterAndResultTypesSB.Insert(0, string.Join("\r\n", customTSD));
        }

        var resultAndParameterTypes = typeScriptParameterAndResultTypesSB.ToString();

        // substitute the placeholders in the TypeScript definitions template
        finalTypeScriptSB.Replace("<<DATE>>", DateTime.Now.ToString("dd MMM yyyy, HH:mm"))
            .Replace("<<FILE_VERSION>>", jsFile.Version.ToString())
            .Replace("<<SERVER_NAME>>", Environment.MachineName)
            .Replace("<<Catalog>>", jsSafeNamespace)
            .Replace("<<ResultAndParameterTypes>>", resultAndParameterTypes)
            .Replace("<<MethodsStubs>>", tsSchemaAndRoutineDefs)
            ;

        finalSBMetric.End();

        var toStringMetric = generateMetric.BeginChildStage("ToString");

        var typescriptDefinitionsOutput = finalTypeScriptSB.ToString();
        var finalOutput = finalSB.ToString();

        toStringMetric.End();

        var filePath = endpoint.OutputFilePath(jsFile);
        var minfiedFilePath = endpoint.MinifiedOutputFilePath(jsFile);
        var tsTypingsFilePath = endpoint.OutputTypeScriptTypingsFilePath(jsFile);

        var minifyMetric = generateMetric.BeginChildStage("Minify");

        var minifiedSource = Uglify.Js(finalOutput /*, { }*/).Code;

        minifyMetric.End();

        if (!Directory.Exists(endpoint.OutputDir)) { Directory.CreateDirectory(endpoint.OutputDir); }

        var fileOutputMetric = generateMetric.BeginChildStage("Write");

        // compute ETags over the exact bytes that will be served
        var jsFinalBytes = System.Text.Encoding.UTF8.GetBytes(finalOutput);
        var jsFinalMinifiedBytes = System.Text.Encoding.UTF8.GetBytes(minifiedSource);

        jsFile.ETag = Controllers.PublicController.ComputeETag(jsFinalBytes);
        jsFile.ETagMinified = Controllers.PublicController.ComputeETag(jsFinalMinifiedBytes);

        File.WriteAllText(filePath, finalOutput);
        File.WriteAllText(minfiedFilePath, minifiedSource);
        File.WriteAllText(tsTypingsFilePath, typescriptDefinitionsOutput);

        fileOutputMetric.End();
    }
    finally
    {
        // always end the metric and emit the per-run summary line, even on early return or exception
        generateMetric.End();
        SessionLog.InfoToFileOnly($"{endpoint.Pedigree.PadRight(25, ' ')} - {generateMetric.DurationInMS.ToString().PadLeft(4)} ms {jsFile.Filename.PadRight(20)} (source={source};rulesChanged={rulesChanged};changes={!noChanges}); {generateMetric.ChildDurationsSingleLine()}");
    }
}