/// <summary>
/// Parses a PO (translation) file at the given path
/// and returns a dictionary containing the translations in the file.
/// </summary>
/// <param name="fullFilePath">File path</param>
/// <returns>A dictionary containing the translations in the file, or null on failure.</returns>
public IOrderedDictionary<string, string> Parse(string fullFilePath)
{
    var parser = new POParser(new POParserSettings());
    string readerInput = _fileWrapper.Read(fullFilePath);

    if (string.IsNullOrEmpty(readerInput))
    {
        return null;
    }

    var result = parser.Parse(readerInput);

    if (result.Success)
    {
        var poFileValues = new OrderedDictionary<string, string>();

        foreach (var poEntry in result.Catalog.Values)
        {
            var key = poEntry.Key.Id;
            var value = poEntry.FirstOrDefault();
            poFileValues.Add(key, value);
        }

        return poFileValues;
    }
    else
    {
        var diagnostics = result.Diagnostics;
        _logger.LogError($"Failed to parse PO file in path '{fullFilePath}'. Diagnostics = '{diagnostics}'");
        return null;
    }
}
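Interpolating result.Diagnostics directly, as in the error branch above, typically prints the collection's type name rather than the individual messages. A minimal sketch (not part of the original method) of flattening the diagnostics before logging, using only the Severity and ToString() members that the later snippets also rely on:

// Sketch: format parse errors into one readable string before logging.
var errorText = string.Join(
    Environment.NewLine,
    result.Diagnostics
        .Where(d => d.Severity == DiagnosticSeverity.Error)
        .Select(d => d.ToString()));
_logger.LogError($"Failed to parse PO file in path '{fullFilePath}'. Diagnostics: {errorText}");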
private static POCatalog CreateCatalog()
{
    using var templateStream = File.OpenRead("Resources/LoTranslation.extracted.template.po");
    var parser = new POParser(new POParserSettings());
    var result = parser.Parse(templateStream, Encoding.UTF8);
    return result.Catalog;
}
private static void LoadLocaleFromWeb()
{
    using (WebClient wc = new WebClient())
    {
        string poText = wc.DownloadString(currentLocale);
        POParser parser = new POParser();
        poCatalog = parser.Parse(poText).Catalog;
    }
}
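WebClient is marked obsolete in .NET 6 and later. A sketch of the same download using HttpClient instead; the currentLocale and poCatalog fields are the ones assumed by the snippet above, and the async signature is an assumption of this sketch:

private static readonly HttpClient httpClient = new HttpClient();

private static async Task LoadLocaleFromWebAsync()
{
    // Download the PO text and parse it exactly as in the WebClient version above.
    string poText = await httpClient.GetStringAsync(currentLocale);
    var parser = new POParser();
    poCatalog = parser.Parse(poText).Catalog;
}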
public PotControl()
{
    InitializeComponent();

    _parser = new POParser(new POParserSettings
    {
        PreserveHeadersOrder = true,
        ReadHeaderOnly = false,
        SkipComments = false,
        SkipInfoHeaders = false
    });
}
public void ParseFull()
{
    var parser = new POParser();

    POParseResult result;
    using (var ms = new MemoryStream(Resources.SamplePO))
        result = parser.Parse(ms);

    Assert.True(result.Success);

    POCatalog catalog = result.Catalog;
    CheckHeader(catalog, expectComments: true, expectInfoHeaders: true, expectOrderedHeaders: false);
    CheckItems(catalog, expectComments: true);
}
public void ParseSkipInfoHeaders()
{
    var parser = new POParser(new POParserSettings { SkipInfoHeaders = true });
    var input = new StreamReader(new MemoryStream(Resources.SamplePO));

    POParseResult result = parser.Parse(input);

    Assert.True(result.Success);

    POCatalog catalog = result.Catalog;
    CheckHeader(catalog, expectComments: true, expectInfoHeaders: false, expectOrderedHeaders: false);
    CheckItems(catalog, expectComments: true);
}
public void AddPoFile()
{
    var filePath = "C:\\users\\pgpoulsen\\Downloads\\Locale\\da-DK\\LC_MESSAGES\\ShopFloorManagementSuite.po";
    var parser = new POParser(new POParserSettings
    {
        // parser options...
    });

    var reader = File.OpenText(filePath);
    var result = parser.Parse(reader);

    var key = result.Catalog.Keys.First();
    var value = result.Catalog[key];
    var v = value[0];

    var plurals = result.Catalog.Keys.Where(x => x.PluralId != null).ToList();
}
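The snippet above only collects the keys that have a plural id. A short sketch (not part of the original test), continuing from the result and plurals variables above, of reading every translation form of such an entry using only the indexer and Count members the other snippets use:

// Each entry for a key with a PluralId holds one translation per plural form;
// index it like a list, as the other snippets do with entry[0], entry[1], ...
foreach (var pluralKey in plurals)
{
    var entry = result.Catalog[pluralKey];
    for (var i = 0; i < entry.Count; i++)
    {
        Console.WriteLine($"{pluralKey.Id} [form {i}]: {entry[i]}");
    }
}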
private void LoadPOFile()
{
    var parser = new POParser();

    POParseResult result = null;
    using (var file = File.OpenRead(POFilePath))
    {
        result = parser.Parse(file);
    }

    if (!result.Success)
    {
        throw new Exception($"Failed to parse po file {POFilePath}");
    }

    var catalog = result.Catalog;
    TranslationData = new Dictionary<string, POTranslationEntry>();

    for (int i = 0; i < catalog.Count; i++)
    {
        var entry = catalog[i];
        if (entry is POSingularEntry sEntry)
        {
            string translation;
            if (!string.IsNullOrWhiteSpace(sEntry.Translation))
            {
                translation = sEntry.Translation;
            }
            else if (LoadUntranslated)
            {
                translation = sEntry.Key.Id;
            }
            else
            {
                continue;
            }

            TranslationData.Add(sEntry.Key.ContextId, new POTranslationEntry(translation, i + 1));
        }
    }
}
public void ParseSkipComments()
{
    var parser = new POParser(new POParserSettings { SkipComments = true });

    // Encoding.GetString keeps BOM
    var input = new StreamReader(new MemoryStream(Resources.SamplePO)).ReadToEnd();

    var result = parser.Parse(input);

    Assert.True(result.Success);

    var catalog = result.Catalog;
    CheckHeader(catalog, expectComments: false, expectInfoHeaders: true);
    CheckItems(catalog, expectComments: false);
}
public void ParseHeaderOnly()
{
    var parser = new POParser(new POParserSettings { ReadHeaderOnly = true });

    POParseResult result;
    using (var ms = new MemoryStream(Resources.SamplePO))
        result = parser.Parse(ms);

    Assert.True(result.Success);

    POCatalog catalog = result.Catalog;
    CheckHeader(catalog, expectComments: true, expectInfoHeaders: true, expectOrderedHeaders: false);
    Assert.Empty(catalog);
}
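A short sketch (not from the test suite) of what ReadHeaderOnly is typically useful for: skimming only the header block of many files to read metadata such as the declared language, without materializing the entries. The "Locales" directory path is illustrative:

// Read only the header of each PO file to discover its declared language;
// with ReadHeaderOnly = true the catalog itself stays empty, as the test above asserts.
var headerParser = new POParser(new POParserSettings { ReadHeaderOnly = true });
foreach (var path in Directory.GetFiles("Locales", "*.po"))
{
    using var stream = File.OpenRead(path);
    var headerResult = headerParser.Parse(stream);
    if (headerResult.Success)
    {
        Console.WriteLine($"{Path.GetFileName(path)}: {headerResult.Catalog.Language}");
    }
}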
public async Task<ApiResponse> Upload(IFormFile uploadedFile)
{
    if (uploadedFile == null || uploadedFile.Length == 0)
    {
        return new ApiResponse(Status404NotFound, L["File not selected"]);
    }

    var parser = new POParser();

    using (var stream = new MemoryStream())
    {
        await uploadedFile.CopyToAsync(stream);
        stream.Position = 0;

        string text;
        using (StreamReader reader = new StreamReader(stream, Encoding.UTF8))
        {
            text = reader.ReadToEnd().Trim('\0');
        }

        var result = parser.Parse(text);

        if (result.Success)
        {
            if (string.IsNullOrWhiteSpace(result.Catalog.Language))
            {
                result.Catalog.Language = Path.GetFileNameWithoutExtension(uploadedFile.FileName);
            }

            await ((StorageLocalizationProvider)localizationProvider).ImportTextCatalog(persistenceManager.Context, result.Catalog);

            logger.LogInformation($"File {uploadedFile.FileName} uploaded by {User.Identity.Name} and imported successfully");
            return new ApiResponse(Status200OK);
        }
        else
        {
            return new ApiResponse(Status400BadRequest, L["File not valid"]);
        }
    }
}
/// <inheritdoc />
public override void Transform(Stream inputStream, Stream outputStream)
{
    var parser = new POParser(new POParserSettings());
    var result = parser.Parse(inputStream);

    if (result.Success)
    {
        var inputCatalog = result.Catalog;
        var outputCatalog = ProcessCatalog(inputCatalog);

        var generator = new POGenerator(new POGeneratorSettings());
        generator.Generate(outputStream, outputCatalog);
    }
    else
    {
        var diagnosticMessages = GetDiagnosticMessages(result.Diagnostics);
        throw new POFileFormatException(diagnosticMessages);
    }
}
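ProcessCatalog is the interesting part of this transform and is not shown. A hypothetical minimal implementation (purely illustrative, not the project's actual logic) that copies the catalog-level metadata the way the later Visit snippet does and keeps only entries that carry at least one non-empty translation:

// Hypothetical ProcessCatalog sketch: copy metadata, drop untranslated entries.
private POCatalog ProcessCatalog(POCatalog input)
{
    var output = new POCatalog
    {
        Encoding = input.Encoding,
        Language = input.Language,
        PluralFormCount = input.PluralFormCount,
        PluralFormSelector = input.PluralFormSelector
    };
    output.Headers = input.Headers;

    foreach (var entry in input)
    {
        // An IPOEntry is a list of translation strings (one per plural form).
        if (entry.Any(translation => !string.IsNullOrEmpty(translation)))
        {
            output.Add(entry);
        }
    }

    return output;
}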
private Dictionary<Regex, string> BuildKnownParts()
{
    var start = @"(^|[:://]{1})";
    var end = @"($|[:://\((]{1})";

    using var partialStream = File.OpenRead(@"Resources\RegexPartials.po");
    var parser = new POParser(new POParserSettings());
    var result = parser.Parse(partialStream, System.Text.Encoding.UTF8);

    var parts = new Dictionary<Regex, string>();

    foreach (var key in result.Catalog)
    {
        var translation = result.Catalog.GetTranslation(key.Key);
        if (string.IsNullOrEmpty(translation))
        {
            continue;
        }

        parts.Add(new Regex(start + key.Key + end), translation);
    }

    using var parenthetecalStream = File.OpenRead(@"Resources\RegexParentheticals.po");
    result = parser.Parse(parenthetecalStream, System.Text.Encoding.UTF8);

    // Added at the end so replacing doesn't break matching of parts
    foreach (var key in result.Catalog)
    {
        var translation = result.Catalog.GetTranslation(key.Key);
        if (string.IsNullOrEmpty(translation))
        {
            continue;
        }

        parts.Add(new Regex(@"[\((]" + key.Key + @"[\))]"), $" ({translation})");
    }

    return parts;
}
public static void LoadDataPOFile()
{
    POFileData.Clear();

    var pathPoFile = GetPathFilePo(DefaultLanguageValue);
    if (string.IsNullOrEmpty(pathPoFile) || !File.Exists(pathPoFile))
    {
        return;
    }

    using (var reader = new StreamReader(pathPoFile, Encoding.UTF8))
    {
        var parser = new POParser();
        var result = parser.Parse(reader);
        if (!result.Success)
        {
            return;
        }

        var catalog = result.Catalog;

        //var languageName = catalog.Language.Replace('_', '-').Trim();
        var languageName = DefaultLanguageValue.Replace('_', '-');
        languageName = languageName.Replace(".po", "").Trim();

        CultureInfo ci = new CultureInfo(languageName);
        CultureInfo.DefaultThreadCurrentCulture = ci;

        foreach (var item in catalog)
        {
            var keyLangue = item.Key.Id;
            var key = new POKey(keyLangue);
            var translation = catalog.GetTranslation(key);
            POFileData.Add(keyLangue, translation);
        }
    }
}
public static void LoadLocaleFromFile()
{
    string poFile = currentLocale + ".po";
    string moFile = currentLocale + ".mo";

    if (File.Exists(poFile))
    {
        using (Stream stream = File.OpenRead(poFile))
        {
            POParser parser = new POParser();
            poCatalog = parser.Parse(stream).Catalog;
        }
    }
    else if (File.Exists(moFile))
    {
        using (Stream stream = File.OpenRead(moFile))
        {
            moCatalog = new Catalog(stream, new CultureInfo(currentLocale.Replace('_', '-')));
        }
    }
    else
    {
        byte[] embeddedMo = (byte[])Properties.Resources.ResourceManager.GetObject("locale_" + currentLocale);
        if (embeddedMo == null)
        {
            return;
        }

        using (Stream stream = new MemoryStream(embeddedMo))
        {
            moCatalog = new Catalog(stream, new CultureInfo(currentLocale.Replace('_', '-')));
        }
    }
}
private static void BuildKeyCache()
{
    if (!Directory.Exists(LOCALIZATION_RESOURCES_DIR))
    {
        return;
    }

    var parser = new POParser();
    var files = Directory.GetFiles(LOCALIZATION_RESOURCES_DIR, "*.po", SearchOption.TopDirectoryOnly);
    var keys = new List<string>();

    foreach (var file in files)
    {
        using (var stream = File.OpenRead(file))
        {
            var result = parser.Parse(stream);
            foreach (var key in result.Catalog.Keys.ToArray())
            {
                if (!keys.Contains(key.Id))
                {
                    keys.Add(key.Id);
                }
            }
        }
    }

    if (AssetDatabase.LoadAssetAtPath<TextAsset>(LOCALIZATION_ALL_PATH))
    {
        AssetDatabase.DeleteAsset(LOCALIZATION_ALL_PATH);
    }

    var assetPath = $"{LOCALIZATION_ALL_PATH}.txt";
    File.WriteAllText(assetPath, string.Join("\n", keys));
    AssetDatabase.Refresh();
}
private static POCatalog BuildCatalog(string filePath, POCatalog templateCatalog)
{
    POCatalog? originalCatalog;

    if (File.Exists(filePath))
    {
        using (var reader = new StreamReader(filePath))
        {
            var parseResult = new POParser().Parse(reader);
            if (!parseResult.Success)
            {
                var diagnosticMessages = parseResult.Diagnostics
                    .Where(diagnostic => diagnostic.Severity == DiagnosticSeverity.Error);

                throw new CommandException($"Template file \"{filePath}\" is invalid: {string.Join(Environment.NewLine, diagnosticMessages)}");
            }

            originalCatalog = parseResult.Catalog;
        }
    }
    else
    {
        originalCatalog = null;
    }

    var catalog = new POCatalog();

    foreach (var templateEntry in templateCatalog)
    {
        var flags = new HashSet<string>(GetPOEntryFlags(templateEntry));
        if (flags.Contains("removed"))
        {
            continue;
        }

        IEnumerable<string> originalFlags;
        if (originalCatalog != null && originalCatalog.TryGetValue(templateEntry.Key, out var originalEntry))
        {
            originalFlags = GetPOEntryFlags(originalEntry);
        }
        else
        {
            (originalFlags, originalEntry) = (Enumerable.Empty<string>(), null);
        }

        var isNew = flags.Remove("new");
        var hasChanged = flags.Remove("changed");
        var isOriginalFuzzy = originalFlags.Contains("fuzzy");

        IPOEntry entry = (hasChanged ? templateEntry : (originalEntry ?? templateEntry)) switch
        {
            POSingularEntry singularEntry => new POSingularEntry(templateEntry.Key) { Translation = singularEntry.Translation },
            POPluralEntry pluralEntry => new POPluralEntry(templateEntry.Key, pluralEntry),
            _ => throw new InvalidOperationException()
        };

        if (isNew || hasChanged || isOriginalFuzzy)
        {
            flags.Add("fuzzy");

            entry.Comments = templateEntry.Comments?.Where(comment => !(comment is POFlagsComment)).ToList() ?? new List<POComment>();
            entry.Comments.Add(new POFlagsComment { Flags = flags });
        }
        else
        {
            entry.Comments = templateEntry.Comments;
        }

        catalog.Add(entry);
    }

    return catalog;
}
private POCatalog BuildTemplateCatalog(string? filePath, KeyValuePair<string, ExtractResult>[] fileTexts)
{
    POCatalog? originalCatalog;

    if (File.Exists(filePath))
    {
        using (var reader = new StreamReader(filePath!))
        {
            var parseResult = new POParser().Parse(reader);
            if (!parseResult.Success)
            {
                var diagnosticMessages = parseResult.Diagnostics
                    .Where(diagnostic => diagnostic.Severity == DiagnosticSeverity.Error);

                throw new CommandException($"Template file \"{filePath}\" is invalid: {string.Join(Environment.NewLine, diagnosticMessages)}");
            }

            originalCatalog = parseResult.Catalog;

            for (var i = originalCatalog.Count - 1; i >= 0; i--)
            {
                var originalEntry = originalCatalog[i];
                if (GetPOEntryFlags(originalEntry).Contains("removed"))
                {
                    originalCatalog.RemoveAt(i);
                }
            }
        }
    }
    else
    {
        originalCatalog = null;
    }

    var groupsById = fileTexts
        .Where(fileText => fileText.Value.Texts != null)
        .SelectMany(fileText => fileText.Value.Texts!.Select(text => (text, fileText.Key)))
        .GroupBy(item => new POKey(item.text.Id, item.text.PluralId, item.text.ContextId))
        .OrderBy(item => item.Key, POKeyComparer.Instance);

    var catalog = new POCatalog();

    foreach (var groupById in groupsById)
    {
        var key = groupById.Key;

        foreach (var (text, sourceFilePath) in groupById)
        {
            if (!catalog.TryGetValue(key, out var entry))
            {
                var state = "new";

                if (originalCatalog != null && originalCatalog.TryGetValue(key, out var originalEntry))
                {
                    var hasChanged =
                        originalEntry.Count == 0 ||
                        originalEntry[0] != text.Id ||
                        (text.PluralId == null
                            ? originalEntry.Count > 1
                            : originalEntry.Count != 2 || originalEntry[1] != text.PluralId);

                    state = hasChanged ? "changed" : null;
                }

                entry = text.PluralId == null
                    ? (IPOEntry)new POSingularEntry(key) { Translation = text.Id }
                    : new POPluralEntry(key) { text.Id, text.PluralId };

                entry.Comments = new List<POComment>();

                if (state != null)
                {
                    entry.Comments.Add(new POFlagsComment { Flags = new HashSet<string> { state } });
                }

                if (!NoReferences)
                {
                    entry.Comments.Add(new POReferenceComment() { References = new List<POSourceReference>() });
                }

                catalog.Add(entry);
            }

            if (!NoReferences)
            {
                var referenceComment = entry.Comments.OfType<POReferenceComment>().First();
                referenceComment.References.Add(new POSourceReference(sourceFilePath, text.LineNumber));
            }

            if (!NoComments && !string.IsNullOrEmpty(text.Comment))
            {
                entry.Comments.Add(new POExtractedComment { Text = text.Comment });
            }
        }
    }

    if (originalCatalog != null)
    {
        foreach (var originalEntry in originalCatalog)
        {
            if (!catalog.Contains(originalEntry.Key))
            {
                var entry = new POSingularEntry(originalEntry.Key)
                {
                    Translation = "***THIS ENTRY WAS REMOVED. DO NOT TRANSLATE!***"
                };

                entry.Comments = new List<POComment>
                {
                    new POFlagsComment { Flags = new HashSet<string> { "removed" } }
                };

                catalog.Add(entry);
            }
        }
    }

    return catalog;
}
private bool LoadFromPoCatalog(Stream stream)
{
    if (stream == null)
    {
        throw new ArgumentNullException(nameof(stream));
    }

    var parser = new POParser(new POParserSettings());
    var result = parser.Parse(stream, Encoding.UTF8);

    if (result.Success)
    {
        foreach (var key in result.Catalog)
        {
            var original = key.Key.Id;
            var item = result.Catalog[key.Key];

            if (!(item is POSingularEntry singleItem))
            {
                Debug.WriteLine($"Unhandled translation item type {item.GetType().Name}");
                continue;
            }

            if (LoadComments)
            {
                ExtractComments(key.Key, singleItem);
            }

            // If we were a partial match we don't need to keep the translation since
            // the partials will be processed again later. We extract the comments
            // above anyway so we maintain the Korean Text comments that may no longer
            // exist in the bin files.
            if (WasPartial(singleItem))
            {
                continue;
            }

            var translation = singleItem.Translation;
            if (string.IsNullOrEmpty(translation))
            {
                continue;
            }

            if (Catalog.ContainsKey(original))
            {
                if (translation != Catalog[original])
                {
                    Debug.WriteLine($"Duplicate with different translation: '{original}' != '{translation}'");
                }

                continue;
            }

            Catalog[key.Key.Id] = translation;
        }

        return true;
    }
    else
    {
        foreach (var error in result.Diagnostics
            .Where(d => d.Severity > DiagnosticSeverity.Warning)
            .Select(d => d.ToString()))
        {
            Errors.Append(error);
        }

        return false;
    }
}
static void Main(string[] args)
{
    if (args.Length > 0)
    {
        if (args[0].ToLower() == "findchars")
        {
            var poDir = args[1];
            Console.WriteLine("Finding PO Files From " + poDir + "...");

            var poFiles = Directory.GetFiles(poDir, "*.po");
            var charList = new List<char>();
            var parser = new POParser();

            foreach (String poPath in poFiles)
            {
                Console.WriteLine("Processing " + poPath + "...");
                var poFile = File.OpenRead(poPath);
                var parsed = parser.Parse(poFile);

                foreach (IPOEntry entry in parsed.Catalog)
                {
                    foreach (String trans in entry)
                    {
                        foreach (char c in trans.ToCharArray())
                        {
                            if (!charList.Contains(c))
                            {
                                charList.Add(c);
                            }
                        }
                    }
                }
            }

            Console.WriteLine("Found " + charList.Count + " chars.");

            var outputFile = new StreamWriter(args[2], false, Encoding.UTF8, 8192);
            var options = new JsonSerializerOptions();
            options.Encoder = System.Text.Encodings.Web.JavaScriptEncoder.UnsafeRelaxedJsonEscaping;
            var doc = JsonSerializer.Serialize(charList, options);
            outputFile.WriteLine(doc);
            outputFile.Close();
        }
        else if (args[0].ToLower() == "topo")
        {
            var xsDir = args[1];
            var poDir = args[2];
        }
        else if (args[0].ToLower() == "toxs" && args.Length == 5)
        {
            var poDir = args[1];
            var xsDir = args[2];

            var tblFile = File.Open(args[3], FileMode.Open);
            string tblJson;
            StreamReader reader = new StreamReader(tblFile);
            tblJson = reader.ReadToEnd();
            Dictionary<string, string> tbl = (Dictionary<string, string>)JsonSerializer.Deserialize(tblJson, typeof(Dictionary<string, string>));

            var outputDir = args[4];

            Console.WriteLine("[INFO] Finding PO Files From " + poDir + "...");
            var poFiles = Directory.GetFiles(poDir, "*.po");
            var parser = new POParser();

            foreach (String poPath in poFiles)
            {
                Console.WriteLine("[INFO] Processing " + poPath + "...");

                var targetXsDirs = Directory.GetDirectories(xsDir).Where(path =>
                {
                    return Path.GetFileName(path) == Path.GetFileNameWithoutExtension(poPath);
                });

                if (targetXsDirs.Count() != 1)
                {
                    Console.WriteLine("[WARN] Cannot find matched directory with " + poPath + ". Skipping...");
                    continue;
                }

                var targetXsDir = targetXsDirs.First();
                var poFile = File.OpenRead(poPath);
                var parsed = parser.Parse(poFile);
                var files = new Dictionary<string, XS>();

                foreach (IPOEntry entry in parsed.Catalog)
                {
                    var contextId = entry.Key.ContextId.Split(':');
                    var xsFileName = contextId[0];
                    var entryIndex = contextId[1];
                    var subEntryIndex = contextId[2];

                    if (entry.Count() != 1)
                    {
                        Console.WriteLine("[WARN] Invalid Entry " + entry.Key.ContextId + ". Skipping...");
                        continue;
                    }

                    if (entry[0] == "")
                    {
                        //Console.WriteLine("[WARN] Empty Entry " + entry.Key.ContextId + ". Skipping...");
                        continue;
                    }

                    var trans = entry[0].Trim().Trim('\n', '\r');
                    foreach (KeyValuePair<string, string> tblChar in tbl)
                    {
                        trans = trans.Replace(tblChar.Value, tblChar.Key);
                    }

                    if (!files.ContainsKey(xsFileName))
                    {
                        if (!File.Exists(targetXsDir + Path.DirectorySeparatorChar + xsFileName))
                        {
                            Console.WriteLine("[WARN] File " + targetXsDir + Path.DirectorySeparatorChar + xsFileName + " does not exist. Skipping...");
                            continue;
                        }

                        files.Add(xsFileName, new XS(targetXsDir + Path.DirectorySeparatorChar + xsFileName));
                    }

                    var xs = files[xsFileName];
                    var targetLabels = xs.Labels.Where(label =>
                    {
                        return label.Name == (entryIndex + ":" + subEntryIndex);
                    });

                    if (targetLabels.Count() != 1)
                    {
                        Console.WriteLine("[WARN] Entry on XS " + entry.Key.ContextId + " does not exist. Skipping...");
                        continue;
                    }

                    var targetLabel = targetLabels.First();
                    targetLabel.Text = trans;
                }

                Console.WriteLine("[INFO] Saving " + Path.GetFileNameWithoutExtension(poPath) + " Files...");

                foreach (KeyValuePair<string, XS> file in files)
                {
                    if (!Directory.Exists(outputDir))
                    {
                        Directory.CreateDirectory(outputDir);
                    }

                    if (!Directory.Exists(outputDir + Path.DirectorySeparatorChar + Path.GetFileNameWithoutExtension(poPath)))
                    {
                        Directory.CreateDirectory(outputDir + Path.DirectorySeparatorChar + Path.GetFileNameWithoutExtension(poPath));
                    }

                    file.Value.Save(outputDir + Path.DirectorySeparatorChar + Path.GetFileNameWithoutExtension(poPath) + Path.DirectorySeparatorChar + file.Key);
                }
            }
        }
        else
        {
            Console.WriteLine("XSConverter <findchars|topo|toxs> <args...>");
            Console.WriteLine("Commands");
            Console.WriteLine("  findchars <poDir> <outputJson> - find used characters in the selected PO files and export them to JSON.");
            Console.WriteLine("  topo <xsDir> <poDir> - export XS files to gettext PO files.");
            Console.WriteLine("  toxs <poDir> <xsDir> <tblJson> <outputDir> - import gettext PO files into XS files.");
        }
    }
    else
    {
        Console.WriteLine("XSConverter <findchars|topo|toxs> <args...>");
        Console.WriteLine("Commands");
        Console.WriteLine("  findchars <poDir> <outputJson> - find used characters in the selected PO files and export them to JSON.");
        Console.WriteLine("  topo <xsDir> <poDir> - export XS files to gettext PO files.");
        Console.WriteLine("  toxs <poDir> <xsDir> <tblJson> <outputDir> - import gettext PO files into XS files.");
    }
}
public async Task Visit(DataStructure dataStructure)
{
    await Task.CompletedTask;

    var language = Thread.CurrentThread.CurrentCulture.Name;
    var projectName = dataStructure.Project.Name;
    var projectPath = dataStructure.ProjectDirectory;
    var localizerEntries = dataStructure.LocalizerEntries;
    var POFilePath = Path.Combine(projectPath, "Localization", language + ".po");

    POCatalog catalog = null;

    if (File.Exists(POFilePath))
    {
        using var sr = new StreamReader(POFilePath, Encoding.UTF8);
        var parser = new POParser(POParserSettings.Default);
        var result = parser.Parse(sr);

        if (result.Success)
        {
            catalog = result.Catalog;
            foreach (var r in catalog)
            {
                r.Comments.Clear();
            }
        }
        else
        {
            var diagnostics = result.Diagnostics;
            // examine diagnostics, display an error, etc...
            foreach (var diagnostic in diagnostics)
            {
                if (diagnostic.Severity.Equals(Karambolo.PO.DiagnosticSeverity.Error))
                {
                    Console.WriteLine($"An error occurred while parsing the PO file: {POFilePath}");
                }
            }
        }
    }

    if (catalog == null)
    {
        catalog = new POCatalog
        {
            Encoding = Encoding.UTF8.BodyName,
            PluralFormCount = 1,
            PluralFormSelector = "0",
            Language = language
        };

        var assembly = typeof(IVisitor).Assembly;
        catalog.Headers = new Dictionary<string, string>
        {
            { "PO-Revision-Date", DateTime.UtcNow.ToString() },
            { "Project-Id-Version", projectName },
            { "X-Crowdin-Generator", $"Generated by {assembly.GetName().Name} {assembly.GetCustomAttribute<AssemblyInformationalVersionAttribute>().InformationalVersion}" },
        };
    }

    HashSet<POKey> sets = new HashSet<POKey>();

    foreach (var entry in localizerEntries)
    {
        var key = new POKey(entry.Id, null, entry.ContextId);
        sets.Add(key);

        if (catalog.TryGetValue(key, out var POEntry))
        {
            if (!POEntry.Comments.OfType<POExtractedComment>().Any(c => c.Text.Equals(entry.SourceCode)))
            {
                POEntry.Comments.Add(new POExtractedComment { Text = entry.SourceCode });
            }

            var referenceComment = POEntry.Comments.OfType<POReferenceComment>().FirstOrDefault();
            if (referenceComment == null)
            {
                POEntry.Comments.Add(new POReferenceComment
                {
                    References = new List<POSourceReference>() { POSourceReference.Parse(entry.SourceReference) }
                });
            }
            else
            {
                var sourceReference = POSourceReference.Parse(entry.SourceReference);
                if (!referenceComment.References.Any(r => r.FilePath.Equals(sourceReference.FilePath) && r.Line.Equals(sourceReference.Line)))
                {
                    referenceComment.References.Add(sourceReference);
                }
            }
        }
        else
        {
            POEntry = new POSingularEntry(key)
            {
                Comments = new List<POComment>()
                {
                    new POReferenceComment { References = new List<POSourceReference>() { POSourceReference.Parse(entry.SourceReference) } },
                    new POExtractedComment { Text = entry.SourceCode },
                }
            };

            catalog.Add(POEntry);
        }
    }

    var keys = catalog.Keys.ToList();
    keys.Where(k => !sets.Contains(k)).ToList().ForEach(k => catalog.Remove(k));

    if (catalog.Headers.ContainsKey("PO-Revision-Date"))
    {
        catalog.Headers["PO-Revision-Date"] = DateTime.UtcNow.ToString();
    }

    var generator = new POGenerator(POGeneratorSettings.Default);
    using var sw = new StreamWriter(POFilePath, false, Encoding.UTF8);
    generator.Generate(sw, catalog);
}
public void ParseWithStringDecodingOptions()
{
    CheckCatalog(new POStringDecodingOptions { }, Environment.NewLine, Environment.NewLine);
    CheckCatalog(new POStringDecodingOptions { KeepKeyStringsPlatformIndependent = true }, "\n", Environment.NewLine);
    CheckCatalog(new POStringDecodingOptions { KeepTranslationStringsPlatformIndependent = true }, Environment.NewLine, "\n");
    CheckCatalog(new POStringDecodingOptions { KeepKeyStringsPlatformIndependent = true, KeepTranslationStringsPlatformIndependent = true }, "\n", "\n");

    void CheckCatalog(POStringDecodingOptions options, string expectedKeyStringNewLine, string expectedTranslationStringNewLine)
    {
        var parserSettings = new POParserSettings
        {
            StringDecodingOptions = options
        };

        var parser = new POParser(parserSettings);

        POParseResult result = parser.Parse(new MemoryStream(Resources.NewLineTestPO));

        Assert.True(result.Success);

        POCatalog catalog = result.Catalog;

        Assert.Equal(4, catalog.Headers.Count);
        Assert.Equal("en_US", catalog.Headers["Language"]);

        Assert.Equal(1, catalog.Count);

        Assert.Equal(
            new POKey($"Id of{expectedKeyStringNewLine}a long text", $"Plural id of{expectedKeyStringNewLine}a long text", $"Context id of{expectedKeyStringNewLine}a long text"),
            catalog[0].Key);

        IPOEntry entry = catalog[0];
        Assert.Equal(2, entry.Count);
        Assert.Equal($"Singular translation of{expectedTranslationStringNewLine}a long text", entry[0]);
        Assert.Equal($"Plural translation of{expectedTranslationStringNewLine}a long text", entry[1]);

        IList<POComment> comments = catalog[0].Comments;
        Assert.Equal(3, comments?.Count ?? 0);

        POComment comment = comments[0];
        Assert.Equal(POCommentKind.PreviousValue, comment.Kind);
        Assert.Equal(POIdKind.ContextId, ((POPreviousValueComment)comment).IdKind);
        Assert.Equal($"Previous context id of{expectedKeyStringNewLine}a long text", ((POPreviousValueComment)comment).Value);

        comment = comments[1];
        Assert.Equal(POCommentKind.PreviousValue, comment.Kind);
        Assert.Equal(POIdKind.Id, ((POPreviousValueComment)comment).IdKind);
        Assert.Equal($"Previous id of{expectedKeyStringNewLine}a long text", ((POPreviousValueComment)comment).Value);

        comment = comments[2];
        Assert.Equal(POCommentKind.PreviousValue, comment.Kind);
        Assert.Equal(POIdKind.PluralId, ((POPreviousValueComment)comment).IdKind);
        Assert.Equal($"Previous plural id of{expectedKeyStringNewLine}a long text", ((POPreviousValueComment)comment).Value);
    }
}