/// <summary>
/// End-to-end test helper: uploads the named test file, builds a config and
/// run for it, maps all matching fields, and runs the import synchronously.
/// </summary>
/// <param name="entityType">Type that imported rows become.</param>
/// <param name="fileName">Name of the embedded test spreadsheet.</param>
/// <param name="importFormat">Format of the test file.</param>
/// <param name="sheetName">Optional sheet reference for Excel files.</param>
/// <returns>The completed import run entity.</returns>
public static ImportRun RunTest(EntityType entityType, string fileName, ImportFormat importFormat, string sheetName = null)
{
    // Upload the test file into the temporary file repository.
    string fileToken;
    using (Stream stream = SheetTestHelper.GetStream(fileName))
    {
        fileToken = FileRepositoryHelper.AddTemporaryFile(stream);
    }

    // Build the config and an (unsaved) run for it.
    ImportConfig importConfig = CreateImportConfig(entityType, importFormat, sheetName);
    ImportRun importRun = CreateImportRun(importConfig, fileToken);

    // Inspect the uploaded sheet and wire up every field that has a matching column.
    ISpreadsheetInspector inspector = Factory.Current.Resolve<ISpreadsheetInspector>();
    SpreadsheetInfo info = inspector.GetSpreadsheetInfo(fileToken, importFormat);
    SampleTable sample = inspector.GetSampleTable(fileToken, importFormat, sheetName, 1, 2, null);
    AddAllFields(importConfig, sample);

    // Run import (synchronously, on this thread).
    IImportRunWorker worker = Factory.Current.Resolve<IImportRunWorker>();
    worker.StartImport(importRun.Id);

    return importRun;
}
/// <summary>
/// Read the import file format from an import config.
/// (Doc fix: previously said "confing" and "import status".)
/// </summary>
/// <param name="importConfig">The import config entity.</param>
/// <returns>The import format.</returns>
/// <exception cref="ArgumentNullException">The config is null.</exception>
/// <exception cref="InvalidOperationException">The file-type enum value is unrecognised.</exception>
internal static ImportFormat GetImportFormat(ImportConfig importConfig)
{
    if (importConfig == null)
    {
        throw new ArgumentNullException(nameof(importConfig));
    }

    var fileType = importConfig.ImportFileType_Enum;
    if (fileType == null)
    {
        // Should not happen for a well-formed config; default to Excel.
        return ImportFormat.Excel;
    }

    switch (fileType.Value)
    {
        case ImportFileTypeEnum_Enumeration.ImportFileTypeCsv:
            return ImportFormat.CSV;
        case ImportFileTypeEnum_Enumeration.ImportFileTypeTab:
            return ImportFormat.Tab;
        case ImportFileTypeEnum_Enumeration.ImportFileTypeExcel:
            return ImportFormat.Excel;
        default:
            throw new InvalidOperationException();
    }
}
/// <summary>
/// Parses .e57 file.
/// </summary>
public static IEnumerable<Chunk> Chunks(string filename, ImportConfig config)
{
    var sizeInBytes = new FileInfo(filename).Length;

    // NOTE(review): the stream is intentionally not disposed here — ownership
    // appears to pass to the lazily-evaluated chunk parser; confirm it closes it.
    var fileStream = File.Open(filename, FileMode.Open, FileAccess.Read, FileShare.Read);

    return Chunks(fileStream, sizeInBytes, config);
}
/// <summary>
/// Runs a full CSV import of the given text against the supplied config,
/// using a mock in-memory file repository in place of the real one.
/// </summary>
/// <param name="config">Import configuration to run against.</param>
/// <param name="csvData">Raw CSV content to import.</param>
/// <returns>The import run entity after the import completes.</returns>
public static ImportRun CsvImport(ImportConfig config, string csvData)
{
    // The token value is irrelevant — the mock repository ignores it.
    ImportSettings settings = new ImportSettings
    {
        FileToken = "whatever",
        ImportConfigId = config.Id
    };

    IFileRepository fileRepository = MockFileRepository.FromText(csvData);

    // Override the temporary-file-repository registration for this scope only.
    using (var scope = Factory.Current.BeginLifetimeScope(
        builder => builder.RegisterInstance(fileRepository).Named<IFileRepository>(FileRepositoryModule.TemporaryFileRepositoryName)))
    {
        // Sanity check: the mock must be what resolves inside this scope.
        Assert.That(
            scope.ResolveKeyed<IFileRepository>(FileRepositoryModule.TemporaryFileRepositoryName),
            Is.InstanceOf<MockFileRepository>());

        // Create import run
        var importer = (SpreadsheetImporter)scope.Resolve<ISpreadsheetImporter>();
        ImportRun run = importer.CreateImportRunEntity(config, settings);
        long runId = run.Id;

        // Execute the import synchronously.
        IImportRunWorker worker = scope.Resolve<IImportRunWorker>();
        worker.StartImport(runId);

        return run;
    }
}
/// <summary>
/// Imports a sequence of chunks into a single PointSet: optionally filters,
/// deduplicates, reprojects and estimates normals, then map/reduces into one
/// octree, optionally generates LOD data, and persists under config.Key
/// (or a fresh random key).
/// </summary>
public static PointSet Chunks(IEnumerable <Chunk> chunks, ImportConfig config)
{
    // optionally filter minDist (sequential L1 distance filter per chunk)
    if (config.MinDist > 0.0)
    {
        chunks = chunks.Select(x => x.ImmutableFilterSequentialMinDistL1(config.MinDist));
    }

    // optionally deduplicate points within each chunk
    if (config.DeduplicateChunks)
    {
        chunks = chunks.Select(x => x.ImmutableDeduplicate());
    }

    // optionally reproject positions and/or estimate normals
    if (config.Reproject != null || config.EstimateNormals != null)
    {
        // Per-chunk transform, applied in parallel below.
        Chunk map(Chunk x, CancellationToken ct)
        {
            if (config.Reproject != null)
            {
                var ps = config.Reproject(x.Positions);
                x = x.WithPositions(ps);
            }

            if (config.EstimateNormals != null)
            {
                var ns = config.EstimateNormals(x.Positions);
                x = x.WithNormals(ns);
            }

            return (x);
        }

        chunks = chunks.MapParallel(map, config.MaxDegreeOfParallelism, null, config.CancellationToken);
    }

    // Reduce all chunks to a single PointSet.
    // Map/reduce accounts for the first 66% of reported progress.
    var final = chunks
        .MapReduce(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(x * 0.66)))
        ;

    // optionally create LOD data (remaining 34% of reported progress)
    // NOTE(review): when CreateOctreeLod is false, reported progress never reaches 1.0.
    if (config.CreateOctreeLod)
    {
        final = final.GenerateLod(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.66 + x * 0.34)));
    }

    // create final point set with specified key (or random key when no key is specified)
    var key = config.Key ?? Guid.NewGuid().ToString();
    final = new PointSet(config.Storage, key, final?.Root?.Value?.Id, config.OctreeSplitLimit);
    config.Storage.Add(key, final, config.CancellationToken);
    return (final);
}
/// <summary>
/// Builds an import config from the data map and executes every configured
/// post-processor against it.
/// </summary>
private void RunPostProcessors(IDataMap map)
{
    ImportConfig config = new ImportConfig(map.ProviderItem, map.ToDB, map.Query)
    {
        ImportLocation = map.ImportToWhere
    };

    foreach (var postProcessor in config.PostProcessors)
    {
        Processor.Execute(postProcessor.ProcessItem, config);
    }
}
/// <summary>
/// Reads the field name and internal-domain list from the processor item and
/// applies them to the configured import location.
/// </summary>
/// <param name="processor">Processor item carrying "Field Name" and "Internal Domains" fields.</param>
/// <param name="config">Import configuration providing the target location.</param>
public void Run(Item processor, ImportConfig config)
{
    string fieldName = processor.Fields["Field Name"].Value;
    string domains = processor.Fields["Internal Domains"].Value;

    // Split the multi-line domain list on both CR and LF so Windows ("\r\n")
    // and Unix ("\n") line endings both yield clean entries.
    // (Fix: the original discarded the result of a '\r' split, then split on
    // '\n' only — leaving trailing '\r' characters and empty entries.)
    List<string> internalDomains = domains
        .Split(new[] { '\r', '\n' }, StringSplitOptions.RemoveEmptyEntries)
        .ToList();

    UpdateFields(config.ImportLocation, config, fieldName, internalDomains, processor);
}
/// <summary>
/// Parses file.
/// Format is guessed based on file extension.
/// </summary>
/// <param name="filename">Path of the file to parse.</param>
/// <param name="config">Import configuration forwarded to the format parser.</param>
/// <returns>Lazy sequence of parsed chunks.</returns>
/// <exception cref="ArgumentNullException">The filename is null.</exception>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
public static IEnumerable <Chunk> Parse(string filename, ImportConfig config)
{
    if (filename == null) throw new ArgumentNullException(nameof(filename));

    // Fix: message previously read "File does not exit ((unknown))." — typo
    // plus a garbled placeholder where the filename belongs.
    if (!File.Exists(filename)) throw new FileNotFoundException($"File does not exist ({filename}).", filename);

    return PointCloudFileFormat.FromFileName(filename).ParseFile(filename, config);
}
/// <summary>
/// Returns a new PointSet whose octree carries generated normals, persisted
/// under config.Key. An empty set is returned unchanged.
/// </summary>
private static PointSet GenerateNormals(this PointSet self, Action callback, ImportConfig config)
{
    // Nothing to do for an empty point set.
    if (self.IsEmpty) return self;

    var rootWithNormals = self.Root.Value.GenerateNormals(callback, config);
    var withNormals = new PointSet(self.Storage, config.Key, rootWithNormals.Id, self.SplitLimit);

    // Persist the result before handing it back.
    self.Storage.Add(config.Key, withNormals, config.CancellationToken);
    return withNormals;
}
/// <summary>
/// Get an adapter for writing entities.
/// </summary>
/// <param name="importConfig">Config whose mapping identifies the target resource.</param>
/// <returns>An adapter that writes reader rows as entities.</returns>
private IReaderToEntityAdapter GetEntityAdapter(ImportConfig importConfig)
{
    ApiResourceMapping mapping = importConfig.ImportConfigMapping;

    // Report problems against target member names rather than source columns.
    var adapterSettings = new ReaderToEntityAdapterSettings();
    adapterSettings.UseTargetMemberNameForReporting = true;
    //settings.TimeZoneName

    return _readerToEntityAdapterProvider.GetAdapter(mapping.Id, adapterSettings);
}
/// <summary>
/// Builds an (unsaved) import run entity for the given config and file token.
/// </summary>
private static ImportRun CreateImportRun(ImportConfig importConfig, string fileToken)
{
    var importer = (SpreadsheetImporter)Factory.Current.Resolve<ISpreadsheetImporter>();

    var settings = new ImportSettings
    {
        FileToken = fileToken,
        ImportConfigId = importConfig.Id,
        TimeZoneName = ""
    };

    return importer.CreateImportRunEntity(importConfig, settings);
}
/// <summary>
/// Gets general info for .pts file.
/// </summary>
/// <param name="filename">Path of the file to scan.</param>
/// <param name="config">Parse configuration used while chunking the file.</param>
/// <returns>File size, total point count and bounding box.</returns>
public static PointFileInfo YxhInfo(string filename, ImportConfig config)
{
    var filesize = new FileInfo(filename).Length;

    // Accumulate the count and bounds across all parsed chunks.
    var pointCount = 0L;
    var pointBounds = Box3d.Invalid;

    // Fix: honour the supplied config — previously the parameter was ignored
    // and the file was always parsed with ImportConfig.Default.
    foreach (var chunk in Chunks(filename, config))
    {
        pointCount += chunk.Count;
        pointBounds.ExtendBy(chunk.BoundingBox);
    }

    return new PointFileInfo(filename, YxhFormat, filesize, pointCount, pointBounds);
}
/// <summary>
/// Runs every configured pre-processor over the document; each non-empty
/// result overwrites the row's action column, so the last one wins.
/// </summary>
private void RunPreProcessors(ImportConfig config, HtmlDocument doc, DataRow dr, string currentDirURL)
{
    foreach (var preProcessor in config.PreProcessors)
    {
        string result = Processor.Execute(preProcessor.ProcessItem, doc, currentDirURL, ImportToWhatTemplate.ID.ToString());

        // Only record a value when the processor actually produced one.
        if (!string.IsNullOrEmpty(result))
        {
            dr[ActionColumn] = result;
        }
    }
}
/// <summary>
/// Creates an (unsaved) import config for the given entity type and format,
/// using conventional header row 1 and first data row 2.
/// </summary>
/// <param name="entityType">Type that imported rows become.</param>
/// <param name="importFormat">Source file format.</param>
/// <param name="sheetId">Sheet reference, for Excel sources.</param>
private static ImportConfig CreateImportConfig(EntityType entityType, ImportFormat importFormat, string sheetId)
{
    ApiResourceMapping resourceMapping = new ApiResourceMapping( );
    resourceMapping.MappedType = entityType;
    resourceMapping.MappingSourceReference = sheetId;
    resourceMapping.ImportHeadingRow = 1;
    resourceMapping.ImportDataRow = 2;

    ImportConfig importConfig = new ImportConfig( );
    importConfig.ImportConfigMapping = resourceMapping;

    // Map each import format to its file-type enum.
    // (Fix: Tab previously fell through to Excel, which disagrees with
    // GetImportFormat and misconfigures tab-separated imports.)
    switch (importFormat)
    {
        case ImportFormat.CSV:
            importConfig.ImportFileType_Enum = ImportFileTypeEnum_Enumeration.ImportFileTypeCsv;
            break;
        case ImportFormat.Tab:
            importConfig.ImportFileType_Enum = ImportFileTypeEnum_Enumeration.ImportFileTypeTab;
            break;
        default:
            importConfig.ImportFileType_Enum = ImportFileTypeEnum_Enumeration.ImportFileTypeExcel;
            break;
    }

    return importConfig;
}
/// <summary>
/// Creates an importRun entity - does not save it.
/// </summary>
/// <param name="importConfig">The import configuration.</param>
/// <param name="importSettings">Settings passed in for the current run.</param>
/// <returns>The new (unsaved) import run entity.</returns>
internal ImportRun CreateImportRunEntity(ImportConfig importConfig, ImportSettings importSettings)
{
    ImportRun run = _entityRepository.Create<ImportRun>();

    // Start queued; the worker advances it through the run states.
    run.ImportRunStatus_Enum = WorkflowRunState_Enumeration.WorkflowRunQueued;
    run.ImportConfigUsed = importConfig;

    // Capture run-specific settings.
    run.ImportFileId = importSettings.FileToken;
    run.ImportFileName = importSettings.FileName;
    run.ImportTimeZone = importSettings.TimeZoneName;
    run.ImportTestRun = importSettings.TestRun;

    // Counters start from zero.
    run.ImportRecordsSucceeded = 0;
    run.ImportRecordsFailed = 0;

    return run;
}
/// <summary>
/// uses the query field to retrieve file data
/// Builds a DataTable with one row per stored URL and returns its rows.
/// Side effects: caches mappings, publishes Config, may rewrite ItemNameFields[0].
/// </summary>
/// <returns>The rows of the built table, cast to objects.</returns>
public override IEnumerable <object> GetImportData()
{
    // Cache the field mappings on this instance for use below.
    mappings = GetMappings();

    // Fixed bookkeeping columns present for every import.
    DataTable dt = new DataTable();
    dt.Columns.Add(ItemNameColumn);
    dt.Columns.Add(PathColumn);
    dt.Columns.Add(RequestedURL);
    dt.Columns.Add(ActionColumn);

    // Build the import config for this run and publish it on the instance.
    ImportConfig config = new ImportConfig(ImportItem, ToDB, this.Query);
    config.ImportLocation = ImportToWhere;
    Config = config;

    // "[URL]" is a placeholder meaning "name items by the URL column".
    if (ItemNameFields.FirstOrDefault() == "[URL]")
    {
        ItemNameFields[0] = ItemNameColumn;
    }

    //Adding columns to the table from field mapping
    foreach (var key in mappings)
    {
        string toField = mappings[key.ToString()];
        dt.Columns.Add(toField);
    }

    List <string> urls = config.StoredURLs;
    foreach (var url in urls)
    {
        // Strip the scheme so the path can be split into directory levels.
        string relativeURL = url.Replace("http://", "").Replace("https://", "");
        Char[] splitChars = { '/' };

        //parts are the directory list in array format, ie. second array is the child of first etc..
        List <string> levels = config.IgnoreRootDirectories
            ? relativeURL.Split(splitChars, StringSplitOptions.RemoveEmptyEntries).Skip(1).ToList()
            : relativeURL.Split(splitChars, StringSplitOptions.RemoveEmptyEntries).ToList();

        // Drop any directory levels the config explicitly excludes.
        if (config.ExcludeDirectories != null && config.ExcludeDirectories.Any())
        {
            levels.RemoveAll(x => config.ExcludeDirectories.Contains(x.ToLower()));
        }

        BuildData(config, levels, url, dt);
    }

    return ((dt.Rows).Cast <object>());
}
/// <summary>
/// Constructs the host: initialises the plugin/widget registries, restores
/// cached history and import configuration (falling back to empty defaults on
/// any failure), hooks shutdown persistence, loads plugins and creates the
/// main form.
/// </summary>
public Host() : base()
{
    _SingletonMode = true;

    // Registries start empty; LoadPlugins() below populates them.
    Plugins = new List <IPlugin>();
    Widgets = new List <IWidget>();
    Settings = new List <ICustomSettings>();
    ImportPlugins = new List <IImportPlugin>();
    ExportPlugins = new List <IExportPlugin>();
    StartPlugins = new List <IStartPlugin>();

    // Restore cached history; on any failure (first run, corrupt cache)
    // fall back to an empty list rather than crashing startup.
    try
    {
        Histories = Util.Deserialize <List <History> >(History.HistoryCachePath);
    }
    catch (Exception)
    {
        Histories = new List <History>();
    }

    // Restore the import configuration, defaulting on any failure.
    try
    {
        ImportConfig = Util.XmlDeserialize <ImportConfig>(ImportConfig.ImageConfigPath);
    }
    catch (Exception)
    {
        ImportConfig = new ImportConfig();
    }

    // Persist user settings and history when the UI thread exits.
    ThreadExit += (s, e) =>
    {
        Properties.Settings.Default.Save();
        Util.Serialize(History.HistoryCachePath, Histories);
    };

    //RegisterSettings(new PreferencesSettings(this));
    LoadPlugins();
    MainForm = new MainForm(this);

#if DEBUG
    // Debug builds: dump loaded plugin metadata to the console.
    foreach (var item in Plugins)
    {
        Console.WriteLine("--------------------------------------");
        Console.WriteLine($"Name: {item.Name}");
        Console.WriteLine($"Version: {item.Version}");
        Console.WriteLine($"Guid: {item.Guid}");
        Console.WriteLine($"Description: {item.Description}");
        Console.WriteLine($"DataPath:{GetPluginDataPath(item)}");
    }
#endif
}
/// <summary>
/// Get reader settings from the config entity.
/// </summary>
/// <param name="importConfig">The import config entity.</param>
/// <returns>Reader settings</returns>
private static DataFileReaderSettings CreateReaderSettings(ImportConfig importConfig)
{
    ApiResourceMapping mapping = importConfig.ImportConfigMapping;

    return new DataFileReaderSettings
    {
        ImportFormat = ImportHelpers.GetImportFormat(importConfig),
        // Conventional defaults: header on row 1, data starting row 2.
        HeadingRowNumber = mapping.ImportHeadingRow ?? 1,
        FirstDataRowNumber = mapping.ImportDataRow ?? 2,
        LastDataRowNumber = mapping.ImportLastDataRow, // default is null
        SheetId = mapping.MappingSourceReference
    };
}
/// <summary>
/// Get all the mapping columns from selected entity Type.
/// Adds a field mapping to the config for every field of the mapped type
/// (except the Alias field).
/// </summary>
/// <param name="importConfig">Config whose mapping receives the field mappings.</param>
public static void AddAllFields(ImportConfig importConfig) // List<ColumnInfo> mappingColumnCollection, EntityType type, DbDataTable sampleDataTable )
{
    EntityType type = importConfig.ImportConfigMapping.MappedType;

    var allFields = EntityTypeHelper.GetAllFields(type);
    foreach (Field field in allFields)
    {
        // Never map the alias field.
        if (field.Name == "Alias")
        {
            continue;
        }

        ApiFieldMapping fieldMapping = new ApiFieldMapping( );
        fieldMapping.Name = field.Name;
        fieldMapping.MappedField = field;

        // Fix: attach the mapping to the config. Previously the mapping was
        // created and discarded, so no fields were ever mapped (compare the
        // two-argument AddAllFields overload, which does add it).
        importConfig.ImportConfigMapping.ResourceMemberMappings.Add(fieldMapping.As <ApiMemberMapping>( ));
    }
}
/// <summary>
/// Begin task to import spreadsheet data.
/// </summary>
/// <param name="importSettings">The settings of the import.</param>
/// <returns>Returns the ID of the import run.</returns>
public long StartImport(ImportSettings importSettings)
{
    // Validate
    if (importSettings == null)
        throw new ArgumentNullException(nameof(importSettings));
    if (string.IsNullOrEmpty(importSettings.FileToken))
        throw new ArgumentException("importSettings.FileToken");

    // Load the config, optionally bypassing security.
    ImportConfig importConfig = SecurityBypassContext.ElevateIf(
        importSettings.SuppressSecurityCheckOnImportConfig,
        () => _entityRepository.Get <ImportConfig>(importSettings.ImportConfigId));
    if (importConfig == null)
        throw new ArgumentException("importSettings.ImportConfigId");

    // Create and persist a new import run in the queued state.
    ImportRun importRun = CreateImportRunEntity(importConfig, importSettings);
    SecurityBypassContext.Elevate(importRun.Save);

    long importRunId = importRun.Id;
    try
    {
        // Kick off the actual import work asynchronously.
        _asyncRunner.Start(() => _importRunWorker.StartImport(importRunId));
    }
    catch
    {
        // Async operation failed to start
        // (This is not reached if the import itself fails)
        importRun.ImportRunStatus_Enum = WorkflowRunState_Enumeration.WorkflowRunFailed;
        importRun.ImportMessages = "Failed to start importer.";
        SecurityBypassContext.Elevate(importRun.Save);
        throw;
    }

    return importRunId;
}
/// <summary>
/// Imports the configured source file and exports its first dataset as CSV
/// to the destination path. Logs progress to the console.
/// </summary>
static async Task ImportDsetExportCsv(ImportConfig config, string destinationFile)
{
    Console.WriteLine($"Started {config.FileName}");

    var driver = DriverFactory.GetImportDriver();
    var datasets = await driver.ImportAsync(config);

    // Nothing to write when the source produced no datasets.
    if (datasets.Length == 0)
    {
        Console.WriteLine($"Source file is empty, nothing to export: {config.FileName}");
        return;
    }

    // Only the first dataset is exported.
    CsvExportDriver.WriteData(datasets.First(), destinationFile, ",", config.Token);
    Console.WriteLine($"Finished {config.FileName}");
}
/// <summary>
/// Uploads a spreadsheet via the import API, then polls the status endpoint
/// until the run leaves the InProgress state (up to 50 polls, 500ms apart).
/// </summary>
/// <param name="config">Import config whose id is passed to the API.</param>
/// <param name="testRun">True to request a test (dry) run.</param>
/// <param name="callback">Optional hook to adjust the upload request before it is sent.</param>
/// <param name="importRunId">Receives the id of the created import run.</param>
/// <returns>The final (non-InProgress) import result.</returns>
/// <exception cref="Exception">Thrown when polling times out.</exception>
private static ImportResultInfo TestUpload(ImportConfig config, bool testRun, Action <PlatformHttpRequest> callback, out long importRunId)
{
    // ImportSpreadsheetData
    string uri = $"data/v2/importSpreadsheet/import?config={config.Id}&file={_fileToken}&filename={TestFileName}";
    if (testRun)
    {
        uri += "&testrun=true";
    }

    using (var request = new PlatformHttpRequest(uri))
    {
        // Let the caller tweak the request (e.g. headers) before sending.
        if (callback != null)
        {
            callback(request);
        }

        HttpWebResponse response = request.GetResponse( );

        // check that it worked (200)
        Assert.That(response.StatusCode, Is.EqualTo(HttpStatusCode.OK));
        importRunId = request.DeserialiseResponseBody <long>( );
    }

    // GetImportStatus — poll until the run leaves InProgress.
    for (int count = 0; count < 50; count++)
    {
        uri = $"data/v2/importSpreadsheet/import/{importRunId}";
        using (var request = new PlatformHttpRequest(uri))
        {
            HttpWebResponse response = request.GetResponse( );

            // check that it worked (200)
            Assert.That(response.StatusCode, Is.EqualTo(HttpStatusCode.OK));
            ImportResultInfo status = request.DeserialiseResponseBody <ImportResultInfo>( );

            // Done as soon as the run is no longer in progress.
            if (status.ImportStatus != ImportStatus.InProgress)
            {
                return (status);
            }
        }

        Thread.Sleep(500);
    }

    throw new Exception("Took too long..");
}
/// <summary>
/// Dynamically loads the assembly and type named by the processor item's
/// "Type" field ("Namespace.TypeName, AssemblyName") and invokes the method
/// named by its "Method" field, passing (processor, config).
/// </summary>
/// <param name="processor">Item whose "Type" and "Method" fields select the target.</param>
/// <param name="config">Import config forwarded to the invoked method.</param>
public static void Execute(Item processor, ImportConfig config)
{
    string type = processor.Fields["Type"].Value;
    string method = processor.Fields["Method"].Value;

    // "Type" field format: "Namespace.TypeName, AssemblyName"
    // (Fix: split once instead of twice.)
    string[] typeParts = type.Split(',');
    string typeName = typeParts[0].Trim();
    string assemblyFile = typeParts[1].Trim() + ".dll";

    // Load the assembly from the executing assembly's directory.
    // (Fix: use Path.Combine rather than manual "\\" concatenation.)
    string directory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    Assembly assembly = Assembly.LoadFile(Path.Combine(directory, assemblyFile));

    Type assemblyType = assembly.GetType(typeName);
    MethodInfo targetMethod = assemblyType.GetMethod(method);

    object instance = Activator.CreateInstance(assemblyType);
    targetMethod.Invoke(instance, new object[] { processor, config });
}
/// <summary>
/// Imports file.
/// Format is guessed based on file extension.
/// </summary>
/// <param name="filename">Path of the point cloud file to import.</param>
/// <param name="config">Optional import configuration; when null, an
/// in-memory store is used and the key is the file's MD5 hash.</param>
/// <exception cref="ArgumentNullException">The filename is null.</exception>
/// <exception cref="FileNotFoundException">The file does not exist.</exception>
public static PointSet Import(string filename, ImportConfig config = null)
{
    if (filename == null) throw new ArgumentNullException(nameof(filename));

    // Fix: message previously read "File does not exit."
    if (!File.Exists(filename)) throw new FileNotFoundException("File does not exist.", filename);

    if (config == null)
    {
        config = ImportConfig.Default
            .WithInMemoryStore()
            .WithKey(FileHelpers.ComputeMd5Hash(filename, true))
            ;
    }

    return PointCloudFileFormat.FromFileName(filename).ImportFile(filename, config);
}
/// <summary>
/// Merges this point set with another from the same storage, returning a new
/// point set built from the merged octree roots.
/// NOTE(review): this method appears truncated in the visible source — the
/// body ends inside the final if-statement; the non-PointSetNode branch and
/// closing brace are not shown here.
/// </summary>
public PointSet Merge(PointSet other, Action <long> pointsMergedCallback, ImportConfig config)
{
    // Merging with an empty set returns the non-empty operand unchanged.
    if (other.IsEmpty) { return (this); }
    if (this.IsEmpty) { return (other); }

    // Both sets must live in the same storage backend.
    if (this.Storage != other.Storage) { throw new InvalidOperationException("Invariant 3267c283-3192-438b-a219-821d67ac5061."); }

    if (Root.Value is PointSetNode root && other.Root.Value is PointSetNode otherRoot)
    {
        var merged = root.Merge(otherRoot, pointsMergedCallback, config);
        var id = $"{Guid.NewGuid()}.json";
        return (new PointSet(Storage, id, merged.Item1.Id, SplitLimit));
    }
    // NOTE(review): remainder of method is outside the visible source.
/// <summary>
/// Builds an unsaved ImportRun wired to a mock Excel config, for testing.
/// Header is on row 3 and data starts on row 4 of sheet "Test1".
/// </summary>
/// <param name="token">File token recorded on the run.</param>
public ImportRun GetMockRun(string token)
{
    var mapping = new ApiResourceMapping
    {
        MappedType = new EntityType(),
        MappingSourceReference = "Test1",
        ImportHeadingRow = 3,
        ImportDataRow = 4
    };

    var config = new ImportConfig
    {
        ImportFileType_Enum = ImportFileTypeEnum_Enumeration.ImportFileTypeExcel,
        ImportConfigMapping = mapping
    };

    return new ImportRun
    {
        ImportConfigUsed = config,
        ImportRunStatus_Enum = WorkflowRunState_Enumeration.WorkflowRunQueued,
        ImportFileId = token
    };
}
/// <summary>
/// Get all the mapping columns from selected entity Type.
/// Adds a field mapping for every entity field (except Alias) that has a
/// matching column in the sampled spreadsheet.
/// </summary>
private static void AddAllFields(ImportConfig importConfig, SampleTable sample) // List<ColumnInfo> mappingColumnCollection, EntityType type, DbDataTable sampleDataTable )
{
    EntityType type = importConfig.ImportConfigMapping.MappedType;

    foreach (Field field in EntityTypeHelper.GetAllFields(type))
    {
        // Never map the alias field.
        if (field.Name == "Alias")
            continue;

        // Only map fields that appear among the sampled columns.
        SampleColumn column = sample.Columns.FirstOrDefault(col => col.Name == field.Name);
        if (column == null)
            continue;

        var fieldMapping = new ApiFieldMapping();
        fieldMapping.Name = column.ColumnName;
        fieldMapping.MappedField = field;
        importConfig.ImportConfigMapping.ResourceMemberMappings.Add(fieldMapping.As<ApiMemberMapping>());
    }
}
/// <summary>
/// Open up a records reader to read the contents of the file.
/// </summary>
/// <remarks>
/// Caller closes stream.
/// </remarks>
private IObjectsReader GetRecordsReader(ImportRun importRun, ImportConfig importConfig)
{
    // Get settings
    DataFileReaderSettings settings = CreateReaderSettings(importConfig);

    // Apply the run's time zone, when one was recorded.
    string timeZone = importRun.ImportTimeZone;
    if (!string.IsNullOrEmpty(timeZone))
    {
        settings.TimeZoneInfo = TimeZoneHelper.GetTimeZoneInfo(timeZone);
    }

    // Pick the reader service for this format.
    IDataFileReaderService fileReader = _readerActivator(settings.ImportFormat);

    // Open stream
    string fileUploadId = importRun.ImportFileId;
    if (string.IsNullOrEmpty(fileUploadId))
    {
        throw new Exception("File handle not set");
    }

    Stream fileStream;
    try
    {
        fileStream = FileRepository.Get(fileUploadId);
    }
    catch (Exception ex)
    {
        throw new Exception("Could not retrieve file. " + ex.Message, ex);
    }

    return fileReader.OpenDataFile(fileStream, settings);
}
/// <summary>
/// Imports a sequence of chunks into a single PointSet: deduplicates, merges
/// small chunks, optionally filters by minimum distance, optionally
/// reprojects, then map/reduces into one octree, generates LOD data, and
/// persists the result under config.Key (or a fresh random key).
/// </summary>
public static PointSet Chunks(IEnumerable <Chunk> chunks, ImportConfig config)
{
    // Report 0% progress up front.
    config?.ProgressCallback(0.0);

    // deduplicate points within each chunk
    chunks = chunks.Select(x => x.ImmutableDeduplicate(config.Verbose));

    // merge small chunks up to the configured max chunk size
    chunks = MergeSmall(config.MaxChunkPointCount, chunks);

    // filter minDist — either per-cell density normalization or a
    // sequential L1 distance filter
    if (config.MinDist > 0.0)
    {
        if (config.NormalizePointDensityGlobal)
        {
            chunks = chunks.Select(x => x.ImmutableFilterMinDistByCell(new Cell(x.BoundingBox), config.ParseConfig));
        }
        else
        {
            chunks = chunks.Select(x => x.ImmutableFilterSequentialMinDistL1(config.MinDist));
        }
    }

    // merge small chunks again — filtering may have shrunk chunks below the threshold
    chunks = MergeSmall(config.MaxChunkPointCount, chunks);

    // EXPERIMENTAL
    //Report.BeginTimed("unmix");
    //chunks = chunks.ImmutableUnmixOutOfCore(@"T:\tmp", 1, config);
    //Report.End();

    // reproject positions and/or estimate normals
    if (config.Reproject != null)
    {
        // Per-chunk transform, applied in parallel below.
        Chunk map(Chunk x, CancellationToken ct)
        {
            if (config.Reproject != null)
            {
                var ps = config.Reproject(x.Positions);
                x = x.WithPositions(ps);
            }

            return (x);
        }

        chunks = chunks.MapParallel(map, config.MaxDegreeOfParallelism, null, config.CancellationToken);
    }

    // reduce all chunks to single PointSet (1%..66% of reported progress)
    Report.BeginTimed("map/reduce");
    var final = chunks
        .MapReduce(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.01 + x * 0.65)))
        ;
    Report.EndTimed();

    // create LOD data (66%..100% of reported progress)
    Report.BeginTimed("generate lod");
    final = final.GenerateLod(config.WithRandomKey().WithProgressCallback(x => config.ProgressCallback(0.66 + x * 0.34)));

    // Sanity check: the generated root node must be resolvable from storage.
    if (final.Root != null && config.Storage.GetPointCloudNode(final.Root.Value.Id) == null)
    {
        throw new InvalidOperationException("Invariant 4d633e55-bf84-45d7-b9c3-c534a799242e.");
    }
    Report.End();

    // create final point set with specified key (or random key when no key is specified)
    var key = config.Key ?? Guid.NewGuid().ToString();
#pragma warning disable CS0618 // Type or member is obsolete
    final = new PointSet(config.Storage, key, final?.Root?.Value?.Id, config.OctreeSplitLimit);
#pragma warning restore CS0618 // Type or member is obsolete
    config.Storage.Add(key, final);
    return (final);
}
/// <summary>
/// Imports single chunk.
/// </summary>
public static PointSet Import(Chunk chunk, ImportConfig config)
{
    // Delegate to the chunk-sequence importer.
    return Chunks(chunk, config);
}