/// <summary>
/// Prepares every new OPTIGEM person for LDAP creation: generates a username,
/// an initial password and a sequential sync user id, then writes the ids back
/// to OPTIGEM.
/// </summary>
/// <param name="reporter">Reporter used for progress and log output.</param>
/// <returns>The number of newly prepared persons.</returns>
private int CreateNewLdapUsers(ITaskReporter reporter)
{
    IList<PersonModel> persons = this.optigem.GetNewPersons().ToList();
    reporter.StartTask("Neue Personen in OPTIGEM vorbereiten", persons.Count);
    if (!persons.Any())
    {
        return 0;
    }

    int intranetUid = this.optigem.GetNextSyncUserId();
    foreach (var person in persons)
    {
        // Build "vorname.nachname"; Trim('.') guards against a missing first or last name.
        // FIX: lower-case with the invariant culture so the generated username does not
        // depend on the host locale (CurrentCulture breaks e.g. under tr-TR dotless-i).
        string username = LdapBuilder.GetCn((person.Vorname?.Trim() + "." + person.Nachname?.Trim()).Trim('.'))
            .ToLower(CultureInfo.InvariantCulture);
        reporter.Progress(username);

        // Usernames are capped at 50 characters (field limit).
        person.Username = username.Length > 50 ? username.Substring(0, 50) : username;
        person.Password = this.CalculatePassword();
        person.SyncUserId = intranetUid++;
        reporter.Log(person.Username + " mit Id " + person.SyncUserId + " angelegt.");
    }

    this.optigem.SetIntranetUserIds(persons);
    return persons.Count;
}
/// <summary>
/// Shared initialization for all search tasks: records the identifying data and
/// begins timing the task immediately.
/// </summary>
private SearchTask(string name, string title, ITaskReporter reporter)
{
    this.name = name;
    this.title = title;
    this.reporter = reporter;

    // Create the stopwatch already running instead of Start() after construction.
    sw = System.Diagnostics.Stopwatch.StartNew();
}
/// <summary>
/// Runs a synchronization while mirroring all reporter output into a
/// timestamped log file inside the configured log directory.
/// </summary>
/// <param name="reporter">Reporter receiving progress and log output.</param>
/// <param name="fullSync">True to run a full synchronization.</param>
/// <returns>The result of the synchronization run.</returns>
public SyncResult Do(ITaskReporter reporter, bool fullSync)
{
    string logFileName = $"SyncLog_{DateTime.Now:yyyy-MM-dd_HH-mm-ss}.log";
    string logPath = Path.Combine(this.logDir.FullName, logFileName);
    using (var loggingReporter = new LoggingReporter(logPath, reporter))
    {
        return this.InternalDo(loggingReporter, fullSync);
    }
}
/// <summary>
/// Synchronizes the member lists of all LDAP groups belonging to this
/// configuration's sync source. Group members are re-fetched first so the diff
/// is computed against the current LDAP state.
/// </summary>
/// <param name="reporter">Reporter for progress and per-member log output.</param>
public void SyncMembership(ITaskReporter reporter)
{
    // Only touch groups owned by this sync source.
    var filteredGroups = this.groups.Where(g => g.SyncGroupSource == this.configuration.LdapSyncGroupSource).ToList();
    reporter.StartTask("Gruppenmitgliedschaften abgleichen", filteredGroups.Count);

    // Refetch group members (might have been changed/renamed!).
    var newGroups = this.GetLdapGroups();
    foreach (var group in filteredGroups)
    {
        reporter.Progress(group.Name);

        // Set base for diff with new values: AddedMembers/RemovedMembers are
        // computed relative to the freshly fetched member list.
        LdapGroup newGroup = newGroups.FirstOrDefault(g => g.SyncGroupId == group.SyncGroupId && g.SyncGroupSource == this.configuration.LdapSyncGroupSource);
        if (newGroup != null)
        {
            group.SetOriginalMembers(newGroup.OrginalMembers);
        }

        DirectoryAttributeModification[] addedMembers = group.AddedMembers
            .Select(m => new DirectoryAttribute("member", m).CreateModification(DirectoryAttributeOperation.Add))
            .ToArray();

        // Members whose DN ends with the group base DN are excluded from removal —
        // presumably these are nested groups managed elsewhere; verify.
        // NOTE(review): EndsWith without a StringComparison is culture-sensitive;
        // for DN comparison ordinal semantics are probably intended — confirm.
        DirectoryAttributeModification[] removedMembers = group.RemovedMembers
            .Where(m => !m.EndsWith(this.configuration.LdapGruppenBaseDn))
            .Select(m => new DirectoryAttribute("member", m).CreateModification(DirectoryAttributeOperation.Delete, forceValue: true))
            .ToArray();

        if (addedMembers.Any() || removedMembers.Any())
        {
            string groupDn = $"cn={group.Name},{this.configuration.LdapGruppenBaseDn}";
            DirectoryAttributeModification[] modifications = addedMembers.Concat(removedMembers).ToArray();

            // Apply adds and deletes in a single modify operation.
            this.ldap.ModifyEntry(groupDn, modifications);

            // Report every added/removed member DN.
            foreach (var item in modifications)
            {
                foreach (string name in item.GetValues<string>())
                {
                    reporter.Log((item.Operation == DirectoryAttributeOperation.Add ? "Hinzugefügt" : "Entfernt") + ": " + name);
                }
            }

            Log.Source.TraceEvent(TraceEventType.Information, 0, "Updated members of LDAP group '{0}': {1} added, {2} removed.", group.Name, addedMembers.Length, removedMembers.Length);
            foreach (string modifiedPrint in modifications.SelectMany(Log.Print))
            {
                Log.Source.TraceEvent(TraceEventType.Verbose, 0, "Update to LDAP group '{0}': {1}.", group.Name, modifiedPrint);
            }
        }
    }
}
/// <summary>
/// Creates a search task that runs asynchronously or on a worker thread and
/// reports its progress. Chains to the private constructor, which stores the
/// task data and starts the timing stopwatch.
/// </summary>
/// <param name="name">Internal task name.</param>
/// <param name="title">Title shown in the progress report.</param>
/// <param name="finished">Callback invoked when the task resolves.</param>
/// <param name="total">Total number of work items, used for progress reporting.</param>
/// <param name="reporter">Reporter receiving task updates.</param>
public SearchTask(string name, string title, ResolveHandler finished, int total, ITaskReporter reporter) : this(name, title, reporter)
{
    this.total = total;
    this.finished = finished;

    // Start the progress item first so the cancel callback can be attached to it.
    progressId = StartReport(title);

    // Manual-reset: once cancellation is signalled it stays signalled for all waiters.
    cancelEvent = new EventWaitHandle(false, EventResetMode.ManualReset);
    if (IsProgressRunning(progressId))
    {
        // Signal the event when the user cancels via the progress UI.
        // NOTE(review): Progress.* looks like the Unity Editor Progress API — confirm.
        Progress.RegisterCancelCallback(progressId, () => cancelEvent.Set());
    }
}
/// <summary>
/// Pulls data from the Spreadsheet with id <see cref="SpreadSheetId"/> and uses <paramref name="columnMapping"/>
/// to populate the <paramref name="collection"/>.
/// </summary>
/// <param name="sheetId">The sheet(Spreadsheet tab) to pull the data from.</param>
/// <param name="collection">The collection to insert the data into.</param>
/// <param name="columnMapping">The column mappings control what data will be extracted for each column of the sheet. The list must contain 1 <see cref="IPullKeyColumn"/>.</param>
/// <param name="removeMissingEntries">After a pull has completed any keys that exist in the <paramref name="collection"/> but did not exist in the sheet are considered missing,
/// this may be because they have been deleted from the sheet. A value of true will remove these missing entries where false will preserve them.</param>
/// <param name="reporter">Optional reporter to display the progress and status of the task.</param>
/// <param name="createUndo">Should an Undo be recorded so any changes can be reverted?</param>
public void PullIntoStringTableCollection(int sheetId, StringTableCollection collection, IList<SheetColumn> columnMapping, bool removeMissingEntries = false, ITaskReporter reporter = null, bool createUndo = false)
{
    VerifyPushPullArguments(sheetId, collection, columnMapping, typeof(IPullKeyColumn));

    try
    {
        // Everything that may be modified: all string tables plus the shared key data.
        var modifiedAssets = collection.StringTables.Select(t => t as Object).ToList();
        modifiedAssets.Add(collection.SharedData);
        if (createUndo)
        {
            Undo.RecordObjects(modifiedAssets.ToArray(), $"Pull `{collection.TableCollectionName}` from Google sheets");
        }

        reporter?.Start($"Pull `{collection.TableCollectionName}` from Google sheets", "Preparing columns");

        // The response columns will be in the same order we request them, we need the key
        // before we can process any values so ensure the first column is the key column.
        var sortedColumns = columnMapping.OrderByDescending(c => c is IPullKeyColumn).ToList();

        // With an API key we can only use the public API (no data filters).
        // We use a data filter when possible as it allows us to remove a lot of unnecessary
        // information, such as unneeded sheets and columns, which reduces the size of the
        // response. A data filter can only be used with OAuth authentication.
        reporter?.ReportProgress("Generating request", 0.1f);
        ClientServiceRequest<Spreadsheet> pullReq = UsingApiKey ? GeneratePullRequest() : GenerateFilteredPullRequest(sheetId, columnMapping);

        reporter?.ReportProgress("Sending request", 0.2f);
        var response = ExecuteRequest<Spreadsheet, ClientServiceRequest<Spreadsheet>>(pullReq);

        reporter?.ReportProgress("Validating response", 0.5f);

        // When using an API key we get all the sheets so we need to extract the one we are pulling from.
        var sheet = UsingApiKey ? response.Sheets?.FirstOrDefault(s => s?.Properties?.SheetId == sheetId) : response.Sheets[0];
        if (sheet == null)
        {
            throw new Exception($"No sheet data available for {sheetId} in Spreadsheet {SpreadSheetId}.");
        }

        // The data will be structured differently if we used a filter or not so we need to extract the parts we need.
        var pulledColumns = new List<(IList<RowData> rowData, int valueIndex)>();
        if (UsingApiKey)
        {
            // When getting the whole sheet all the columns are stored in a single Data.
            // We need to extract the correct value index for each column.
            foreach (var sortedCol in sortedColumns)
            {
                pulledColumns.Add((sheet.Data[0].RowData, sortedCol.ColumnIndex));
            }
        }
        else
        {
            if (sheet.Data.Count != columnMapping.Count)
            {
                throw new Exception($"Column mismatch. Expected a response with {columnMapping.Count} columns but only got {sheet.Data.Count}");
            }

            // When using a filter each Data represents a single column.
            foreach (var d in sheet.Data)
            {
                pulledColumns.Add((d.RowData, 0));
            }
        }

        MergePull(pulledColumns, collection, columnMapping, removeMissingEntries, reporter);

        // There is a bug that causes Undo to not set assets dirty (case 1240528) so we always set the asset dirty.
        modifiedAssets.ForEach(EditorUtility.SetDirty);

        LocalizationEditorSettings.EditorEvents.RaiseCollectionModified(this, collection);
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Extracts data from <paramref name="collection"/> using <paramref name="columnMapping"/> and sends it to the sheet
/// inside of the Spreadsheet with id <see cref="SpreadSheetId"/>.
/// This method requires the <see cref="SheetsService"/> to use OAuth authorization as an API Key does not have the ability to write to a sheet.
/// </summary>
/// <param name="sheetId">The sheet(Spreadsheet tab) to insert the data into.</param>
/// <param name="collection">The collection to extract the data from.</param>
/// <param name="columnMapping">The column mappings control what data will be extracted for each column of the sheet. The list must contain 1 <see cref="KeyColumn"/>.</param>
/// <param name="reporter">Optional reporter to display the progress and status of the task.</param>
public void PushStringTableCollection(int sheetId, StringTableCollection collection, IList<SheetColumn> columnMapping, ITaskReporter reporter = null)
{
    VerifyPushPullArguments(sheetId, collection, columnMapping, typeof(KeyColumn));

    // Nothing to push
    if (collection.StringTables.Count == 0)
    {
        return;
    }

    try
    {
        reporter?.Start($"Push `{collection.TableCollectionName}` to Google Sheets", "Checking if sheet needs resizing");
        var requests = new List<Request>();
        var rowCount = GetRowCount(sheetId);

        // Do we need to resize the sheet?
        var requiredRows = collection.SharedData.Entries.Count + 1; // + 1 for the header row
        // NOTE(review): the comparison uses Entries.Count rather than requiredRows — when
        // the sheet has exactly Entries.Count rows no resize is requested even though the
        // header pushes the requirement one past that. Confirm whether GetRowCount already
        // excludes the header row.
        if (collection.SharedData.Entries.Count > rowCount)
        {
            reporter?.ReportProgress("Generating sheet resize request", 0.15f);
            requests.Add(ResizeRow(sheetId, requiredRows));
        }

        GeneratePushRequests(sheetId, collection, columnMapping, requests, reporter);

        reporter?.ReportProgress("Sending Request", 0.5f);

        // FIX: the batch response was captured into an unused local (`resp`); the
        // return value is not needed, errors surface as exceptions.
        SendBatchUpdateRequest(SpreadSheetId, requests);
        reporter?.Completed($"Pushed {requiredRows} rows successfully.");
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Pulls data from the Spreadsheet with id <see cref="SpreadSheetId"/> and uses <paramref name="columnMapping"/>
/// to populate the <paramref name="collection"/>.
/// </summary>
/// <param name="sheetId">The sheet(Spreadsheet tab) to pull the data from.</param>
/// <param name="collection">The collection to insert the data into.</param>
/// <param name="columnMapping">The column mappings control what data will be extracted for each column of the sheet. The list must contain 1 <see cref="IPullKeyColumn"/>.</param>
/// <param name="removeMissingEntries">After a pull has completed any keys that exist in the <paramref name="collection"/> but did not exist in the sheet are considered missing,
/// this may be because they have been deleted from the sheet. A value of true will remove these missing entries where false will preserve them.</param>
/// <param name="reporter">Optional reporter to display the progress and status of the task.</param>
/// <param name="createUndo">Should an Undo be recorded so any changes can be reverted?</param>
public void PullIntoStringTableCollection(int sheetId, StringTableCollection collection, IList<SheetColumn> columnMapping, bool removeMissingEntries = false, ITaskReporter reporter = null, bool createUndo = false)
{
    VerifyPushPullArguments(sheetId, collection, columnMapping, typeof(IPullKeyColumn));

    try
    {
        // Everything that may be modified: all string tables plus the shared key data.
        var modifiedAssets = collection.StringTables.Select(t => t as Object).ToList();
        modifiedAssets.Add(collection.SharedData);
        if (createUndo)
        {
            Undo.RecordObjects(modifiedAssets.ToArray(), $"Pull `{collection.TableCollectionName}` from Google sheets");
        }

        reporter?.Start($"Pull `{collection.TableCollectionName}` from Google sheets", "Preparing columns");

        // The response columns come back in the order we request them and MergePull
        // requires columnMapping[0] to be the key column (it asserts this), so the
        // mapping is used as-is.
        // FIX: the previous `columnMapping.OrderBy(c => c is IPullKeyColumn)` local was
        // never used and would have sorted the key column LAST, contradicting its own
        // intent — it has been removed as dead code.
        reporter?.ReportProgress("Generating request", 0.1f);
        var pullReq = GeneratePullRequest(sheetId, columnMapping);

        reporter?.ReportProgress("Sending request", 0.2f);
        var response = ExecuteRequest<Spreadsheet, GetByDataFilterRequest>(pullReq);

        reporter?.ReportProgress("Validating response", 0.5f);
        if (response.Sheets == null || response.Sheets.Count == 0)
        {
            throw new Exception($"No sheet data available for {sheetId} in Spreadsheet {SpreadSheetId}.");
        }

        // The data filter returns one Data block per requested column.
        var sheet = response.Sheets[0];
        if (sheet.Data.Count != columnMapping.Count)
        {
            throw new Exception($"Column mismatch. Expected a response with {columnMapping.Count} columns but only got {sheet.Data.Count}");
        }

        MergePull(sheet, collection, columnMapping, removeMissingEntries, reporter);

        // NOTE(review): another overload in this codebase always calls SetDirty because
        // Undo does not reliably dirty assets (case 1240528) — consider doing the same here.
        if (!createUndo)
        {
            modifiedAssets.ForEach(EditorUtility.SetDirty);
        }

        LocalizationEditorSettings.EditorEvents.RaiseCollectionModified(this, collection);
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Exports all <see cref="StringTable"/> in <paramref name="collections"/> as 1 or more XLIFF files,
/// where each generated file represents a single target language.
/// </summary>
/// <param name="source">The language used as the source language for all generated XLIFF files.</param>
/// <param name="directory">The directory to output the generated XLIFF files.</param>
/// <param name="name">The default name for all generated XLIFF files. Files will be saved with the full name "[name]_[Language Code].xlf".</param>
/// <param name="version">The XLIFF version to generate the files in.</param>
/// <param name="collections">1 or more <see cref="StringTableCollection"/> to export; all of their tables are selected.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
public static void Export(LocaleIdentifier source, string directory, string name, XliffVersion version, ICollection<StringTableCollection> collections, ITaskReporter reporter = null)
{
    if (collections == null)
    {
        throw new ArgumentNullException(nameof(collections));
    }

    // Select every table of every collection: for each collection the full
    // index range [0, StringTables.Count) is marked as selected.
    var selection = new Dictionary<StringTableCollection, HashSet<int>>();
    foreach (var collection in collections)
    {
        var allTableIndexes = new HashSet<int>(Enumerable.Range(0, collection.StringTables.Count));
        selection[collection] = allTableIndexes;
    }

    ExportSelected(source, directory, name, version, selection, reporter);
}
/// <summary>
/// Pulls data from the table with id <see cref="TableId"/> and uses <paramref name="fieldMapping"/>
/// to populate the <paramref name="collection"/>.
/// </summary>
/// <param name="collection">The collection to insert the data into.</param>
/// <param name="fieldMapping">The column mappings control what data will be extracted for each column of the sheet. The list must contain 1 <see cref="IPullKeyColumn"/>.</param>
/// <param name="removeMissingEntries">After a pull has completed any keys that exist in the <paramref name="collection"/> but did not exist in the sheet are considered missing,
/// this may be because they have been deleted from the sheet. A value of true will remove these missing entries where false will preserve them.</param>
/// <param name="reporter">Optional reporter to display the progress and status of the task.</param>
/// <param name="createUndo">Should an Undo be recorded so any changes can be reverted?</param>
public void PullIntoStringTableCollection(StringTableCollection collection, IList<SheetColumn> fieldMapping, bool removeMissingEntries = false, ITaskReporter reporter = null, bool createUndo = false)
{
    VerifyPushPullArguments(collection, fieldMapping, typeof(IPullKeyColumn));

    try
    {
        // Everything that may be modified: all string tables plus the shared key data.
        var modifiedAssets = collection.StringTables.Select(t => t as Object).ToList();
        modifiedAssets.Add(collection.SharedData);
        if (createUndo)
        {
            Undo.RecordObjects(modifiedAssets.ToArray(), $"Pull `{collection.TableCollectionName}` from JSON file");
        }

        reporter?.Start($"Pull `{collection.TableCollectionName}` from JSON file", "Preparing fields");

        // The response columns will be in the same order we request them, we need the key
        // before we can process any values so ensure the first column is the key column.
        // var sortedColumns = fieldMapping.OrderByDescending(c => c is IPullKeyColumn).ToList();
        // We can only use public API. No data filters.
        // We use a data filter when possible as it allows us to remove a lot of unnecessary information,
        // such as unneeded sheets and columns, which reduces the size of the response.
        // A Data filter can only be used with OAuth authentication.
        reporter?.ReportProgress("Generating request", 0.1f);

        // TODO we need to get the json data.
        // ClientServiceRequest<Spreadsheet> pullReq = UsingApiKey
        //     ? GeneratePullRequest()
        //     : GenerateFilteredPullRequest(sheetId, fieldMapping);

        reporter?.ReportProgress("Sending request", 0.2f);
        var table = TableDatabase.Get.GetTable(TableId);

        reporter?.ReportProgress("Validating response", 0.5f);

        // When using an API key we get all the sheets so we need to extract the one we are pulling from.
        if (table == null)
        {
            throw new Exception($"No table data available for {TableId}");
        }

        // The data will be structured differently if we used a filter or not so we need to extract the parts we need.
        var pulledRows = new List<(uint valueIndex, TableRow rowData)>();
        if (UsingApiKey)
        {
            // NOTE(review): this branch is not implemented — pulledRows stays empty and
            // MergePull receives no rows when UsingApiKey is true. Confirm this path is
            // unreachable for this importer.
            // When getting the whole sheet all the columns are stored in a single Data.
            // We need to extract the correct value index for each column.
            // foreach (var sortedCol in sortedColumns)
            // {
            //     TableRow row = table.Rows[0];
            //     pulledRows.Add((row.Fields.Values, sortedCol.ColumnIndex));
            // }
        }
        else
        {
            // TODO check the fieldmapping count and see if it is equal to the object children amount
            // When using a filter each Data represents a single column.
            pulledRows = table.Rows.Select(t => (t.Key, t.Value)).ToList();
        }

        MergePull(pulledRows, collection, fieldMapping, removeMissingEntries, reporter);

        // There is a bug that causes Undo to not set assets dirty (case 1240528) so we always set the asset dirty.
        modifiedAssets.ForEach(EditorUtility.SetDirty);

        // if(LocalizationEditorSettings.EditorEvents.CollectionModified != null)
        LocalizationEditorSettings.EditorEvents.RaiseCollectionMod(this, collection);
        AssetDatabase.SaveAssets();
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Exports a <see cref="StringTableCollection"/> using <paramref name="columnMappings"/> to control the contents of each exported column.
/// <see cref="ColumnMapping.CreateDefaultMapping(bool)"/> can be used to generate the default columns for the project.
/// </summary>
/// <param name="writer">The target that will be populated with CSV data.</param>
/// <param name="collection">The collection to export to CSV.</param>
/// <param name="columnMappings">Controls what will be exported.
/// The <seealso cref="KeyIdColumns"/> can be used to export the Key, Id and shared comments whilst <seealso cref="LocaleColumns"/> can be
/// used to export the values and comments for a specific <see cref="UnityEngine.Localization.Locale"/>.</param>
/// <param name="reporter">An optional reporter that can be used to provide feedback during export.</param>
public static void Export(TextWriter writer, StringTableCollection collection, IList<CsvColumns> columnMappings, ITaskReporter reporter = null)
{
    if (writer == null)
    {
        throw new ArgumentNullException(nameof(writer));
    }
    if (collection == null)
    {
        throw new ArgumentNullException(nameof(collection));
    }
    VerifyColumnMappings(columnMappings);

    // InvariantCulture so the CSV output does not depend on the host locale.
    using (var csvWriter = new CsvWriter(writer, CultureInfo.InvariantCulture))
    {
        try
        {
            reporter?.Start("Exporting CSV", string.Empty);

            // Let every column emit its header cells first.
            reporter?.ReportProgress("Writing Headers", 0);
            foreach (var cell in columnMappings)
            {
                cell.WriteBegin(collection, csvWriter);
            }

            reporter?.ReportProgress("Writing Contents", 0.1f);
            foreach (var row in collection.GetRowEnumerator())
            {
                // Rows marked with ExcludeEntryFromExport metadata are skipped.
                if (row.TableEntries[0] != null && row.TableEntries[0].SharedEntry.Metadata.HasMetadata<ExcludeEntryFromExport>())
                {
                    continue;
                }

                csvWriter.NextRecord();
                foreach (var cell in columnMappings)
                {
                    cell.WriteRow(row.KeyEntry, row.TableEntries, csvWriter);
                }
            }

            // Give each column a chance to finish the export.
            foreach (var cell in columnMappings)
            {
                cell.WriteEnd(collection);
            }

            reporter?.Completed("Finished Exporting");
        }
        catch (Exception e)
        {
            reporter?.Fail("Failed Exporting.\n" + e.Message);
            throw;
        }
    }
}
/// <summary>
/// Exports all <see cref="UnityEngine.Localization.Tables.StringTable"/> in <paramref name="collection"/>
/// using the default column mappings generated through <see cref="ColumnMapping.CreateDefaultMapping(bool)"/>.
/// </summary>
/// <param name="writer">The target that will be populated with CSV data.</param>
/// <param name="collection">The collection to export to CSV.</param>
/// <param name="reporter">An optional reporter that can be used to provide feedback during export.</param>
/// <example>
/// This example shows how to export a <see cref="StringTableCollection"/> to a csv file.
/// <code>
/// using (var stream = new StreamWriter("my CSV file.csv", false, Encoding.UTF8))
/// {
///     var stringTableCollection = LocalizationEditorSettings.GetStringTableCollection("My Strings");
///     Export(stream, stringTableCollection);
/// }
/// </code>
/// </example>
public static void Export(TextWriter writer, StringTableCollection collection, ITaskReporter reporter = null) =>
    Export(writer, collection, ColumnMapping.CreateDefaultMapping(), reporter);
/// <summary>
/// Imports the CSV data into <paramref name="collection"/> using <paramref name="columnMappings"/>
/// to control how each column is read.
/// </summary>
/// <param name="reader">The source of the CSV data.</param>
/// <param name="collection">The target collection to be updated using the CSV data.</param>
/// <param name="columnMappings">Controls how the data is imported. The list must contain one <see cref="IKeyColumn"/>.</param>
/// <param name="createUndo">Should an Undo operation be created so the changes can be undone?</param>
/// <param name="reporter">An optional reporter that can be used to provide feedback during import.</param>
public static void ImportInto(TextReader reader, StringTableCollection collection, IList<CsvColumns> columnMappings, bool createUndo = false, ITaskReporter reporter = null)
{
    if (reader == null)
    {
        throw new ArgumentNullException(nameof(reader));
    }
    if (collection == null)
    {
        throw new ArgumentNullException(nameof(collection));
    }
    VerifyColumnMappings(columnMappings);

    // All assets that may change: every string table plus the shared key data.
    var modifiedAssets = collection.StringTables.Select(t => t as UnityEngine.Object).ToList();
    modifiedAssets.Add(collection.SharedData);
    if (createUndo)
    {
        Undo.RegisterCompleteObjectUndo(modifiedAssets.ToArray(), "Import CSV");
    }

    try
    {
        // InvariantCulture so parsing does not depend on the host locale.
        using (var csvReader = new CsvReader(reader, CultureInfo.InvariantCulture))
        {
            // Advance to the header row and parse it.
            csvReader.Read();
            csvReader.ReadHeader();

            reporter?.Start("Importing CSV", string.Empty);
            reporter?.ReportProgress("Mapping Headers", 0);
            foreach (var col in columnMappings)
            {
                col.ReadBegin(collection, csvReader);
            }

            // The key column resolves which table entry each row belongs to.
            // (First() throws if no IKeyColumn is present.)
            var keyCell = columnMappings.First(o => o is IKeyColumn) as IKeyColumn;

            reporter?.ReportProgress("Reading Contents", 0.1f);
            while (csvReader.Read())
            {
                var keyEntry = keyCell.ReadKey(csvReader);
                foreach (var cell in columnMappings)
                {
                    cell.ReadRow(keyEntry, csvReader);
                }
            }

            // Give each column a chance to finish the import.
            foreach (var cell in columnMappings)
            {
                cell.ReadEnd(collection);
            }
        }

        // Mark everything dirty and notify listeners of the change.
        modifiedAssets.ForEach(EditorUtility.SetDirty);
        LocalizationEditorSettings.EditorEvents.RaiseCollectionModified(null, collection);
        reporter?.Completed("Finished Importing");
    }
    catch (Exception e)
    {
        reporter?.Fail("Failed Importing.\n" + e.Message);
        throw;
    }
}
/// <summary>
/// Imports the CSV data into <paramref name="collection"/> using the default column
/// mappings generated by <see cref="ColumnMapping.CreateDefaultMapping(bool)"/>.
/// See <seealso cref="KeyIdColumns"/> and <seealso cref="LocaleColumns"/> for further details.
/// </summary>
/// <param name="reader">The source of the CSV data.</param>
/// <param name="collection">The target collection to be updated using the CSV data.</param>
/// <param name="createUndo">Should an Undo operation be created so the changes can be undone?</param>
/// <param name="reporter">An optional reporter that can be used to provide feedback during import.</param>
public static void ImportInto(TextReader reader, StringTableCollection collection, bool createUndo = false, ITaskReporter reporter = null) =>
    ImportInto(reader, collection, ColumnMapping.CreateDefaultMapping(), createUndo, reporter);
/// <summary>
/// Synchronizes OPTIGEM person categories with LDAP groups: renames groups whose
/// category name changed, creates groups for new categories, and deletes groups
/// whose category no longer exists.
/// </summary>
/// <param name="reporter">Reporter for progress and log output.</param>
public void SyncMetadata(ITaskReporter reporter)
{
    this.categories = this.optigem.GetCategories().ToList();
    Log.Source.TraceEvent(TraceEventType.Verbose, 0, "Fetched {0} groups from Optigem.", categories.Count);
    this.zuordnungen = this.optigem.GetAllKategorieZuordnungen().ToList();

    // +1 for the trailing "obsolete groups" step.
    reporter.StartTask("Personenkategorien abgleichen", categories.Count + 1);

    this.groups = this.GetLdapGroups();
    Log.Source.TraceEvent(TraceEventType.Verbose, 0, "Fetched {0} groups from LDAP.", this.groups.Count);

    // Groups still in this list after the loop have no matching category and are deleted.
    var openGroups = groups.Where(g => g.SyncGroupSource == this.configuration.LdapSyncGroupSource).ToList();
    Log.Source.TraceEvent(TraceEventType.Verbose, 0, "{0} groups from LDAP have correct sync source ('{1}').", openGroups.Count, this.configuration.LdapSyncGroupSource);

    foreach (PersonenkategorieModel category in categories)
    {
        reporter.Progress(category.Name);
        var group = groups.FirstOrDefault(g => g.SyncGroupSource == this.configuration.LdapSyncGroupSource && g.SyncGroupId == category.Id);
        string name = LdapBuilder.GetCn(category.Name);
        string groupDn = $"cn={name},{this.configuration.LdapGruppenBaseDn}";
        if (group != null)
        {
            // Existing group: rename it when the category name changed.
            if (group.Name != name)
            {
                this.ldap.MoveEntry($"cn={group.Name},{this.configuration.LdapGruppenBaseDn}", this.configuration.LdapGruppenBaseDn, $"cn={name}");
                Log.Source.TraceEvent(TraceEventType.Information, 0, "Updated group name from '{0}' to '{1}'.", group.Name, name);
                reporter.Log($"Updated group name from '{group.Name}' to '{name}'.");
                group.Name = name;
            }
            openGroups.Remove(group);
        }
        else
        {
            // Add new group
            this.ldap.AddEntry(
                groupDn,
                new[]
                {
                    new DirectoryAttribute("cn", name),
                    new DirectoryAttribute("syncgroupsource", this.configuration.LdapSyncGroupSource),
                    new DirectoryAttribute("syncgroupid", category.Id.ToString()),
                    new DirectoryAttribute("objectclass", "top", "groupOfNames", "feggroup")
                });
            Log.Source.TraceEvent(TraceEventType.Information, 0, "Added new group '{0}'.", groupDn);
            reporter.Log($"Added new group '{groupDn}'.");

            // Optionally nest the new group inside the configured parent groups.
            if (this.configuration.LdapDefaultParentGroups != null)
            {
                foreach (string parentGroupDn in this.configuration.LdapDefaultParentGroups)
                {
                    var addAttribute = new DirectoryAttributeModification { Name = "member", Operation = DirectoryAttributeOperation.Add };
                    addAttribute.Add(groupDn);
                    this.ldap.ModifyEntry(
                        parentGroupDn,
                        new[] { addAttribute });
                    Log.Source.TraceEvent(TraceEventType.Information, 0, "Added new group '{0}' to parent group '{1}'.", name, parentGroupDn);
                    reporter.Log($"Added new group '{name}' to parent group '{parentGroupDn}'.");
                }
            }

            // Track the new group locally so later sync steps see it.
            groups.Add(new LdapGroup { Name = name, SyncGroupId = category.Id, SyncGroupSource = this.configuration.LdapSyncGroupSource });
        }
    }

    reporter.Progress("Checking for obsolete groups...");

    // Remove obsolete groups
    foreach (var group in openGroups)
    {
        this.ldap.DeleteEntry($"cn={group.Name},{this.configuration.LdapGruppenBaseDn}");
        Log.Source.TraceEvent(TraceEventType.Information, 0, "Deleted obsolete group '{0}'.", group.Name);
        reporter.Log($"Deleted obsolete group '{group.Name}'.");
        groups.Remove(group);
    }
}
/// <summary>
/// Import an XLIFF document into the target table, ignoring <see cref="IXliffDocument.TargetLanguage"/>.
/// </summary>
/// <param name="document">The XLIFF document to import.</param>
/// <param name="target">The target table that will be populated with the translated values.</param>
/// <param name="importNotesBehavior">How should the notes be imported?</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
public static void ImportDocumentIntoTable(IXliffDocument document, StringTable target, ImportNotesBehavior importNotesBehavior = ImportNotesBehavior.Replace, ITaskReporter reporter = null)
{
    if (document == null)
    {
        throw new ArgumentNullException(nameof(document));
    }
    if (target == null)
    {
        throw new ArgumentNullException(nameof(target));
    }

    EditorUtility.SetDirty(target);

    // Continue from the reporter's current progress (plus a small initial bump).
    float progress = reporter == null ? 0 : reporter.CurrentProgress + 0.1f;
    reporter?.ReportProgress("Importing XLIFF into table", progress);

    // FIX: the step was inverted (`FileCount / (1.0f - progress)`), which overshoots
    // 100% after the first file. Spread the remaining progress evenly across the files;
    // guard against FileCount == 0 to avoid an Infinity step.
    float progressStep = document.FileCount > 0 ? (1.0f - progress) / document.FileCount : 0f;

    var options = new ImportOptions { UpdateSourceTable = false, ImportNotes = importNotesBehavior };
    for (int i = 0; i < document.FileCount; ++i)
    {
        var f = document.GetFile(i);
        progress += progressStep;
        reporter?.ReportProgress($"Importing({i + 1}/{document.FileCount}) {f.Id}", progress);
        ImportIntoTables(f, null, target, options);
    }

    // Notify listeners when the table belongs to a known collection.
    var collection = LocalizationEditorSettings.GetCollectionFromTable(target);
    if (collection != null)
    {
        LocalizationEditorSettings.EditorEvents.RaiseCollectionModified(document, collection);
    }
    reporter?.Completed("Finished importing XLIFF");
}
/// <summary>
/// Exports the selected string tables of each collection as XLIFF, producing one
/// file per target language (tables with the same locale across collections are
/// merged into one document).
/// </summary>
/// <param name="source">Language used as the XLIFF source language for all files.</param>
/// <param name="dir">Output directory.</param>
/// <param name="name">Base file name; files are written as "[name]_[code].xlf".</param>
/// <param name="version">XLIFF version to generate.</param>
/// <param name="collectionsWithSelectedIndexes">For each collection, the indexes of the string tables to export.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
internal static void ExportSelected(LocaleIdentifier source, string dir, string name, XliffVersion version, Dictionary<StringTableCollection, HashSet<int>> collectionsWithSelectedIndexes, ITaskReporter reporter = null)
{
    var documents = DictionaryPool<LocaleIdentifier, IXliffDocument>.Get();
    try
    {
        // Used for reporting: half the progress budget for generating documents,
        // the other half for writing the files.
        // NOTE(review): with an empty selection taskStep becomes Infinity (division
        // by zero as float) — confirm callers guarantee a non-empty selection.
        int totalTasks = collectionsWithSelectedIndexes.Sum(c => c.Value.Count);
        float taskStep = 1.0f / (totalTasks * 2.0f);
        float progress = 0;
        reporter?.Start($"Exporting {totalTasks} String Tables to XLIFF", string.Empty);

        foreach (var kvp in collectionsWithSelectedIndexes)
        {
            var stringTableCollection = kvp.Key;
            var sourceTable = stringTableCollection.GetTable(source) as StringTable;
            if (sourceTable == null)
            {
                var message = $"Collection {stringTableCollection.TableCollectionName} does not contain a table for the source language {source}";
                reporter?.Fail(message);
                throw new Exception(message);
            }

            foreach (var stringTableIndex in kvp.Value)
            {
                var stringTable = stringTableCollection.StringTables[stringTableIndex];
                reporter?.ReportProgress($"Generating document for {stringTable.name}", progress);
                progress += taskStep;

                // One document per target locale; tables of the same locale from
                // different collections are appended to the same document.
                if (!documents.TryGetValue(stringTable.LocaleIdentifier, out var targetDoc))
                {
                    targetDoc = CreateDocument(source, stringTable.LocaleIdentifier, version);
                    documents[stringTable.LocaleIdentifier] = targetDoc;
                }
                AddTableToDocument(targetDoc, sourceTable, stringTable);
            }
        }

        // Now write the files
        foreach (var doc in documents)
        {
            var cleanName = CleanFileName(name);
            var fileName = $"{cleanName}_{doc.Key.Code}.xlf";
            var filePath = Path.Combine(dir, fileName);
            reporter?.ReportProgress($"Writing {fileName}", progress);
            progress += taskStep;
            using (var stream = new FileStream(filePath, FileMode.Create, FileAccess.Write))
            {
                doc.Value.Serialize(stream);
            }
        }

        reporter?.Completed($"Finished exporting");
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
    finally
    {
        // Always return the pooled dictionary, even on failure.
        DictionaryPool<LocaleIdentifier, IXliffDocument>.Release(documents);
    }
}
/// <summary>
/// Wraps another reporter and additionally appends all output to the given log file.
/// </summary>
/// <param name="logFile">Path of the log file; created if missing, otherwise appended to.</param>
/// <param name="wrappedReporter">The reporter that all calls are forwarded to.</param>
public LoggingReporter(string logFile, ITaskReporter wrappedReporter)
{
    // Open the log first; the two assignments are independent of each other.
    this.writer = File.AppendText(logFile);
    this.wrappedReporter = wrappedReporter;
}
/// <summary>
/// Builds the batch update requests needed to push <paramref name="collection"/>
/// into the sheet, one set of requests per mapped column so that unmapped sheet
/// columns are left untouched.
/// </summary>
/// <param name="sheetId">The sheet (Spreadsheet tab) the requests target.</param>
/// <param name="collection">The collection whose entries are pushed.</param>
/// <param name="columnMapping">The columns to generate; each produces its own requests.</param>
/// <param name="requestsToSend">Receives the generated requests.</param>
/// <param name="reporter">Optional progress reporter.</param>
void GeneratePushRequests(int sheetId, StringTableCollection collection, IList<SheetColumn> columnMapping, List<Request> requestsToSend, ITaskReporter reporter)
{
    // Prepare the tables - Sort the keys and table entries so both sides can be
    // walked in lock-step by ascending key id.
    reporter?.ReportProgress("Sorting entries", 0.2f);
    var sortedKeyEntries = collection.SharedData.Entries.OrderBy(e => e.Id);
    var sortedTableEntries = new List<IOrderedEnumerable<StringTableEntry>>();
    foreach (var table in collection.StringTables)
    {
        if (table != null)
        {
            var s = table.Values.OrderBy(e => e.KeyId);
            sortedTableEntries.Add(s);
        }
    }

    // Extract all the data so we can generate a request to send.
    // We go through each Key, extract the table entries for that key if they exist
    // and then pass this to each column. One live enumerator per table, each
    // pre-advanced to its first entry.
    var currentTableRowIterator = sortedTableEntries.Select(o =>
    {
        var itr = o.GetEnumerator();
        itr.MoveNext();
        return itr;
    }).ToArray();

    // Prepare the column requests.
    // We use a request per column as it's possible that some columns in the sheet
    // will be preserved and we don't want to write over them.
    reporter?.ReportProgress("Generating column headers", 0.25f);
    var columnSheetRequests = new List<PushColumnSheetRequest>(columnMapping.Count);
    foreach (var col in columnMapping)
    {
        var colRequest = new PushColumnSheetRequest(sheetId, col);
        columnSheetRequests.Add(colRequest);
        colRequest.Column.PushBegin(collection);
        colRequest.Column.PushHeader(collection, out var header, out var note);
        colRequest.AddHeader(header, note);
    }

    // Populate the requests from the string tables.
    var currentTableRow = new StringTableEntry[sortedTableEntries.Count];
    foreach (var keyRow in sortedKeyEntries)
    {
        // Extract the string table entries for this row. An enumerator is only
        // advanced when its current entry matches the key, so a missing entry
        // simply yields null for that table.
        for (int i = 0; i < currentTableRow.Length; ++i)
        {
            var tableRowItr = currentTableRowIterator[i];
            if (tableRowItr.Current?.KeyId == keyRow.Id)
            {
                currentTableRow[i] = tableRowItr.Current;
                tableRowItr.MoveNext();
            }
            else
            {
                currentTableRow[i] = null;
            }
        }

        // Now process each sheet column so they can update their requests.
        foreach (var colReq in columnSheetRequests)
        {
            colReq.Column.PushCellData(keyRow, currentTableRow, out var value, out var note);
            colReq.AddRow(value, note);
        }
    }

    // Let each column finish and collect its generated requests.
    foreach (var col in columnSheetRequests)
    {
        col.Column.PushEnd();
        requestsToSend.AddRange(col.Requests);
    }
}
/// <summary>
/// Export the values in <paramref name="tables"/> using <paramref name="sourceLanguage"/> as the source language to one or more XLIFF files.
/// One XLIFF file is written per target table.
/// </summary>
/// <param name="sourceLanguage">The table used as the source language for every generated XLIFF file.</param>
/// <param name="directory">The directory where all generated XLIFF files will be saved to.</param>
/// <param name="version">The XLIFF version to generate the files in.</param>
/// <param name="tables">1 or more <see cref="StringTable"/> used as the target language, one XLIFF file each.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="sourceLanguage"/> or <paramref name="tables"/> is null.</exception>
public static void Export(StringTable sourceLanguage, string directory, XliffVersion version, ICollection <StringTable> tables, ITaskReporter reporter = null)
{
    if (sourceLanguage == null)
        throw new ArgumentNullException(nameof(sourceLanguage));
    if (tables == null)
        throw new ArgumentNullException(nameof(tables));

    try
    {
        // Progress bookkeeping: each table accounts for one step out of 2 * count.
        float step = 1.0f / (tables.Count * 2.0f);
        float currentProgress = 0;
        reporter?.Start($"Exporting {tables.Count} String Tables to XLIFF", string.Empty);

        // We need the key, source value and translated value.
        foreach (var targetTable in tables)
        {
            reporter?.ReportProgress($"Exporting {targetTable.name}", currentProgress);
            currentProgress += step;

            var document = CreateDocument(sourceLanguage.LocaleIdentifier, targetTable.LocaleIdentifier, version);
            AddTableToDocument(document, sourceLanguage, targetTable);

            var cleanName = CleanFileName(targetTable.name);
            var outputPath = Path.Combine(directory, $"{cleanName}.xlf");
            using (var fileStream = new FileStream(outputPath, FileMode.Create, FileAccess.Write))
                document.Serialize(fileStream);
        }

        reporter?.Completed($"Finished exporting");
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Merges the pulled <paramref name="sheet"/> data back into <paramref name="collection"/>.
/// The first column must resolve keys (<see cref="IPullKeyColumn"/>); the remaining columns
/// supply per-key cell data.
/// </summary>
void MergePull(Sheet sheet, StringTableCollection collection, IList <SheetColumn> columnMapping, bool removeMissingEntries, ITaskReporter reporter)
{
    reporter?.ReportProgress("Preparing to merge", 0.55f);

    // Collect per-row issues here so we emit a single combined report instead of filling the console.
    var issueLog = new StringBuilder();

    var keyColumn = columnMapping[0] as IPullKeyColumn;
    Debug.Assert(keyColumn != null, "Expected the first column to be a Key column");

    var rowCount = sheet.Data[0].RowData.Count;

    // Send the start message to every column.
    foreach (var column in columnMapping)
        column.PullBegin(collection);

    reporter?.ReportProgress("Merging response into collection", 0.6f);

    var seenKeyIds = new HashSet <uint>();
    uint cellsMerged = 0;
    for (int rowIndex = 0; rowIndex < rowCount; rowIndex++)
    {
        var keyCell = sheet.Data[0].RowData[rowIndex].Values[0];
        var keyValue = keyCell.FormattedValue;
        var keyNote = keyCell.Note;

        var keyEntry = keyColumn.PullKey(keyValue, keyNote);
        if (keyEntry == null)
        {
            issueLog.AppendLine($"No key data was found for row {rowIndex} with Value '{keyValue}' and Note '{keyNote}'.");
            continue;
        }

        // Record the id so we can check what key ids were missing later.
        seenKeyIds.Add(keyEntry.Id);
        cellsMerged++;

        for (int columnIndex = 1; columnIndex < columnMapping.Count; ++columnIndex)
        {
            var columnData = sheet.Data[columnIndex];

            // Skip columns that have no data for this row.
            if (!(columnData.RowData?.Count > rowIndex && columnData.RowData[rowIndex].Values?.Count > 0))
                continue;

            var cell = columnData.RowData[rowIndex].Values[0];
            columnMapping[columnIndex].PullCellData(keyEntry, cell.FormattedValue, cell.Note);
            cellsMerged++;
        }
    }

    if (removeMissingEntries)
    {
        reporter?.ReportProgress("Removing missing entries", 0.9f);
        RemoveMissingEntries(seenKeyIds, collection, issueLog);
    }

    reporter?.Completed($"Completed merge of {rowCount} rows and {cellsMerged} cells from {columnMapping.Count} columns successfully.\n{issueLog.ToString()}");
}
/// <summary>
/// Imports all XLIFF files with the extensions xlf or xliff into existing <see cref="StringTableCollection"/> or new ones if a matching one could not be found.
/// </summary>
/// <param name="directory">The directory to search. Searches sub directories as well.</param>
/// <param name="importOptions">Optional import options which can be used to configure the importing behavior.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
public static void ImportDirectory(string directory, ImportOptions importOptions = null, ITaskReporter reporter = null)
{
    try
    {
        reporter?.Start("Importing XLIFF files in directory", "Finding xlf and xliff files.");
        var files = Directory.GetFiles(directory, "*", SearchOption.AllDirectories);

        // Materialize the filter once so the deferred query is not enumerated twice
        // (once for Count, once for the loop). Ordinal comparison: file extensions
        // are not linguistic text, so avoid the culture-sensitive EndsWith default.
        var filteredFiles = files.Where(s => s.EndsWith(".xlf", StringComparison.Ordinal) || s.EndsWith(".xliff", StringComparison.Ordinal)).ToList();

        // BUG FIX: the step was previously `Count() / 1.0f` (i.e. the file count itself),
        // which made reported progress overshoot 1.0 after the first file.
        // One step is 1 / count; first report is at taskStep, last at 1.0.
        float taskStep = filteredFiles.Count > 0 ? 1.0f / filteredFiles.Count : 1.0f;
        float progress = taskStep;
        foreach (var f in filteredFiles)
        {
            reporter?.ReportProgress($"Importing {f}", progress);
            progress += taskStep;

            // Don't pass the reporter in as it will be Completed after each file and we only want to do that at the end.
            ImportFile(f, importOptions);
        }
        reporter?.Completed("Finished importing XLIFF files");
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Builds the Google Sheets push requests needed to write <paramref name="collection"/> into the sheet.
/// Walks the shared key entries and fetches each table's entry by key id.
/// </summary>
void GeneratePushRequests(int sheetId, StringTableCollection collection, IList <SheetColumn> columnMapping, List <Request> requestsToSend, ITaskReporter reporter)
{
    // Prepare the column requests.
    // A request per column is used because some sheet columns may be preserved
    // and we don't want to write over them.
    reporter?.ReportProgress("Generating column headers", 0);
    var pushRequests = new List <PushColumnSheetRequest>(columnMapping.Count);
    foreach (var column in columnMapping)
    {
        var request = new PushColumnSheetRequest(sheetId, column);
        pushRequests.Add(request);
        request.Column.PushBegin(collection);
        request.Column.PushHeader(collection, out var header, out var headerNote);
        request.AddHeader(header, headerNote);
    }

    var tables = collection.StringTables;
    var rowEntries = new StringTableEntry[tables.Count];

    reporter?.ReportProgress("Generating push data", 0.1f);
    foreach (var sharedEntry in collection.SharedData.Entries)
    {
        // Gather this key's entry from every table (may be null if a table has no value).
        for (int i = 0; i < tables.Count; ++i)
            rowEntries[i] = tables[i].GetEntry(sharedEntry.Id);

        // Let each sheet column append its cell for this row.
        foreach (var request in pushRequests)
        {
            request.Column.PushCellData(sharedEntry, rowEntries, out var cellValue, out var cellNote);
            request.AddRow(cellValue, cellNote);
        }
    }

    // Finalize each column and collect its requests.
    foreach (var request in pushRequests)
    {
        request.Column.PushEnd();
        requestsToSend.AddRange(request.Requests);
    }
}
/// <summary>
/// Imports a single XLIFF file into the project.
/// Attempts to find matching <see cref="StringTableCollection"/>'s, if one could not be found then a new one is created.
/// </summary>
/// <param name="file">The XLIFF file.</param>
/// <param name="importOptions">Optional import options which can be used to configure the importing behavior.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="file"/> does not exist.</exception>
public static void ImportFile(string file, ImportOptions importOptions = null, ITaskReporter reporter = null)
{
    reporter?.Start("Importing XLIFF", $"Importing {file}");
    try
    {
        if (!File.Exists(file))
            throw new FileNotFoundException($"Could not find file {file}");

        using (var xliffStream = new FileStream(file, FileMode.Open, FileAccess.Read))
        {
            reporter?.ReportProgress("Parsing XLIFF", 0.1f);
            ImportDocument(XliffDocument.Parse(xliffStream), importOptions, reporter);
        }
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Creates a new Google Spreadsheet.
/// </summary>
/// <param name="spreadSheetTitle">The title of the Spreadsheet.</param>
/// <param name="sheetTitle">The title of the sheet(tab) that is part of the Spreadsheet.</param>
/// <param name="newSheetProperties">Settings applied to the newly created sheet via SetupSheet. Must not be null.</param>
/// <param name="reporter">Optional reporter to display the progress and status of the task.</param>
/// <returns>Returns the new Spreadsheet and sheet id.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="newSheetProperties"/> is null.</exception>
public (string spreadSheetId, int sheetId) CreateSpreadsheet(string spreadSheetTitle, string sheetTitle, NewSheetProperties newSheetProperties, ITaskReporter reporter = null)
{
    if (newSheetProperties == null)
    {
        throw new ArgumentNullException(nameof(newSheetProperties));
    }

    try
    {
        reporter?.Start("Create Spreadsheet", "Preparing Request");

        // Build a create request describing the spreadsheet with one initial sheet (tab).
        var createRequest = SheetsService.Service.Spreadsheets.Create(new Spreadsheet
        {
            Properties = new SpreadsheetProperties
            {
                Title = spreadSheetTitle
            },
            Sheets = new Sheet[]
            {
                new Sheet
                {
                    Properties = new SheetProperties
                    {
                        Title = sheetTitle,
                    }
                }
            }
        });

        reporter?.ReportProgress("Sending create request", 0.2f);
        var createResponse = ExecuteRequest <Spreadsheet, CreateRequest>(createRequest);

        // Remember the new spreadsheet id so subsequent operations on this instance target it.
        SpreadSheetId = createResponse.SpreadsheetId;
        var sheetId = createResponse.Sheets[0].Properties.SheetId.Value;

        reporter?.ReportProgress("Setting up new sheet", 0.5f);
        SetupSheet(SpreadSheetId, sheetId, newSheetProperties);

        reporter?.Completed(string.Empty);
        return(SpreadSheetId, sheetId);
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Imports a single XLIFF document into the project.
/// Attempts to find matching <see cref="StringTableCollection"/>'s, if one could not be found then a new one is created.
/// </summary>
/// <param name="document">The root XLIFF document.</param>
/// <param name="importOptions">Optional import options which can be used to configure the importing behavior.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="document"/> is null.</exception>
public static void ImportDocument(IXliffDocument document, ImportOptions importOptions = null, ITaskReporter reporter = null)
{
    if (document == null)
    {
        throw new ArgumentNullException(nameof(document));
    }

    reporter?.Start("Importing XLIFF", "Importing document");
    try
    {
        // Continue from the reporter's current progress when chained from another import step.
        float progress = reporter == null ? 0.1f : reporter.CurrentProgress + 0.1f;
        reporter?.ReportProgress("Importing XLIFF into project", progress);

        // BUG FIX: was `document.FileCount / (1.0f - progress)` — the inverted ratio
        // made progress overshoot 1.0 after the first file. Divide the remaining
        // progress range evenly across the files instead.
        float progressStep = document.FileCount > 0 ? (1.0f - progress) / document.FileCount : 0f;

        var options = importOptions ?? s_DefaultOptions;
        for (int i = 0; i < document.FileCount; ++i)
        {
            var f = document.GetFile(i);
            progress += progressStep;
            reporter?.ReportProgress($"Importing({i + 1}/{document.FileCount}) {f.Id}", progress);
            ImportFileNode(f, document.SourceLanguage, document.TargetLanguage, options);
        }
        reporter?.Completed("Finished importing XLIFF");
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Create blocking task. Chains to the base constructor for common setup,
/// then opens a blocking progress report for <paramref name="title"/>.
/// </summary>
public SearchTask(string name, string title, int total, ITaskReporter reporter)
    : this(name, title, reporter)
{
    progressId = StartBlockingReport(title);
    this.total = total;
}
/// <summary>
/// Import the XLIFF file into the collection.
/// </summary>
/// <param name="collection">The collection to import all the XLIFF data into. Must not be null.</param>
/// <param name="file">The XLIFF file path.</param>
/// <param name="importOptions">Optional import options which can be used to configure the importing behavior.</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="collection"/> is null.</exception>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="file"/> does not exist.</exception>
public static void ImportFileIntoCollection(StringTableCollection collection, string file, ImportOptions importOptions = null, ITaskReporter reporter = null)
{
    if (collection == null)
    {
        throw new ArgumentNullException(nameof(collection));
    }

    reporter?.Start("Importing XLIFF", $"Importing {file}");
    try
    {
        if (!File.Exists(file))
        {
            throw new FileNotFoundException($"Could not find file {file}");
        }

        using (var stream = new FileStream(file, FileMode.Open, FileAccess.Read))
        {
            reporter?.ReportProgress("Parsing XLIFF", 0.1f);
            var document = XliffDocument.Parse(stream);

            float progress = 0.3f;
            reporter?.ReportProgress("Importing XLIFF into project", progress);

            // BUG FIX: was `document.FileCount / 1.0f * 0.7f` (i.e. FileCount * 0.7),
            // which made progress overshoot 1.0 after the first file. Divide the
            // remaining 0.7 of the progress range evenly across the files instead.
            float progressStep = document.FileCount > 0 ? 0.7f / document.FileCount : 0f;

            var options = importOptions ?? s_DefaultOptions;
            for (int i = 0; i < document.FileCount; ++i)
            {
                var f = document.GetFile(i);
                progress += progressStep;
                reporter?.ReportProgress($"Importing({i + 1}/{document.FileCount}) {f.Id}", progress);
                ImportFileIntoCollection(collection, f, document.SourceLanguage, document.TargetLanguage, options);
            }
            reporter?.Completed("Finished importing XLIFF");
        }
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}
/// <summary>
/// Create a search task with a <paramref name="finished"/> resolve handler.
/// Delegates to the five-argument constructor with a total of 1
/// (presumably a single-step task — confirm against that constructor).
/// </summary>
public SearchTask(string name, string title, ResolveHandler finished, ITaskReporter reporter)
    : this(name, title, finished, 1, reporter)
{
}
/// <summary>
/// Import an XLIFF file into the target table, ignoring <see cref="IXliffDocument.TargetLanguage"/>.
/// </summary>
/// <param name="file">The XLIFF file path.</param>
/// <param name="target">The target table that will be populated with the translated values.</param>
/// <param name="importNotesBehavior">How should the notes be imported?</param>
/// <param name="reporter">Optional reporter which can report the current progress.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="target"/> is null.</exception>
/// <exception cref="FileNotFoundException">Thrown when <paramref name="file"/> does not exist.</exception>
public static void ImportFileIntoTable(string file, StringTable target, ImportNotesBehavior importNotesBehavior = ImportNotesBehavior.Replace, ITaskReporter reporter = null)
{
    if (target == null)
        throw new ArgumentNullException(nameof(target));

    reporter?.Start("Importing XLIFF", $"Importing {file}");
    try
    {
        if (!File.Exists(file))
            throw new FileNotFoundException($"Could not find file {file}");

        using (var xliffStream = new FileStream(file, FileMode.Open, FileAccess.Read))
        {
            reporter?.ReportProgress("Parsing XLIFF", 0.1f);
            ImportDocumentIntoTable(XliffDocument.Parse(xliffStream), target, importNotesBehavior, reporter);
        }
    }
    catch (Exception e)
    {
        reporter?.Fail(e.Message);
        throw;
    }
}