/// <summary>
/// Builds a CSV that merges a study's scalar inputs and scalar results into a
/// single table: one column block for the job index, then the inputs, then the
/// results, with three header rows (descriptions, names, units) and one data
/// row per result row.
/// </summary>
/// <param name="root">Root folder of the study (not read here; kept for signature symmetry with the other writers).</param>
/// <param name="writer">File writer (not read here; kept for signature symmetry with the other writers).</param>
/// <param name="studyScalarFiles">The discovered scalar input/result/metadata files.</param>
/// <returns>The CSV content, or null when the required files or data are missing.</returns>
public static async Task<string> GetCombinedStudyScalarDataCsv(
    IRootFolder root,
    IFileWriter writer,
    StudyScalarFiles studyScalarFiles)
{
    if (studyScalarFiles.ScalarResults == null || studyScalarFiles.ScalarInputs == null)
    {
        return null;
    }

    var scalarResultsFile = studyScalarFiles.ScalarResults;
    var scalarMetadataFile = studyScalarFiles.ScalarMetadata;

    var (scalarResults, scalarMetadata, jobIndexColumn) =
        await GetStudyScalarResultsFileContent(scalarResultsFile, scalarMetadataFile);

    if (scalarResults.Count == 0 || scalarMetadata.Count == 0 || jobIndexColumn == null)
    {
        return null;
    }

    var scalarInputs = await LoadScalarResults(studyScalarFiles.ScalarInputs);
    var scalarInputsMetadata = await LoadScalarInputsMetadata(studyScalarFiles.ScalarInputsMetadata);

    // Skip the column titles row.
    scalarInputsMetadata = scalarInputsMetadata.Skip(1).ToList();

    // Pair each input column with its metadata row by position; columns with
    // no matching row get null metadata.
    scalarInputs = scalarInputs
        .Select((v, i) => v.WithMetadata(scalarInputsMetadata?.Count > i ? scalarInputsMetadata[i] : null))
        .ToList();

    // Now we add the job index, then the scalar inputs, then the scalar results.
    // (The original code also accumulated a combined metadata list here, but it
    // was never read — removed as dead code.)
    var results = new List<ScalarResultItem>();
    results.Add(jobIndexColumn);
    results.AddRange(scalarInputs);
    results.AddRange(scalarResults);

    var csv = new StringBuilder();

    // Three header rows: descriptions, names, units.
    csv.AppendLine(string.Join(",", results.Select(r => r.Metadata?.Description?.WithQuotes() ?? string.Empty)));
    csv.AppendLine(string.Join(",", results.Select(r => (r.Metadata?.FullName ?? r.Name).WithQuotes())));
    csv.AppendLine(string.Join(",", results.Select(r => r.Metadata?.Units?.WithQuotes() ?? string.Empty)));

    // One data row per result row. Inputs are indexed by the job index read
    // from the job-index column; results are indexed by the row position.
    for (int resultDataIndex = 0; resultDataIndex < jobIndexColumn.Data.Count; resultDataIndex++)
    {
        var jobIndex = (int)jobIndexColumn.Data[resultDataIndex].ParseJavascriptDouble();
        var lineData = new List<string> { jobIndex.ToJavascriptString() }
            .Concat(scalarInputs.Select(v => v.Data[jobIndex].WithQuotes()))
            .Concat(scalarResults.Select(v => v.Data[resultDataIndex].WithQuotes()))
            .ToList();
        csv.AppendLine(string.Join(",", lineData));
    }

    return csv.ToString();
}
/// <summary>
/// AppData constructor.
/// </summary>
/// <param name="rootFolder">Root folder used to resolve app data folders.</param>
/// <param name="systemConfig">System configuration.</param>
/// <param name="appId">Identifier of the app this data belongs to.</param>
public AppData(IRootFolder rootFolder, SystemConfig systemConfig, string appId)
{
    this.rootFolder = rootFolder;
    this.config = systemConfig;
    this.appId = appId;
    this.folders = new CappedMemoryCache();
}
/// <summary>
/// PreviewManager constructor.
/// </summary>
/// <param name="config">Configuration.</param>
/// <param name="rootFolder">Root folder.</param>
/// <param name="appData">App data store.</param>
/// <param name="eventDispatcher">Event dispatcher.</param>
/// <param name="userId">Identifier of the current user.</param>
public PreviewManager(IConfig config, IRootFolder rootFolder, IAppData appData, EventDispatcherInterface eventDispatcher, string userId)
{
    this.config = config;
    this.rootFolder = rootFolder;
    this.appData = appData;
    this.eventDispatcher = eventDispatcher;
    this.userId = userId;
}
/// <summary>
/// Builds the combined scalar-data CSV for the study and, when content is
/// produced, writes it to the root folder as "scalar-results-merged.csv".
/// </summary>
/// <param name="root">Root folder to write into.</param>
/// <param name="writer">Writer used to emit the new file.</param>
/// <param name="studyScalarFiles">The discovered scalar input/result/metadata files.</param>
public static async Task ExecuteAsync(
    IRootFolder root,
    IFileWriter writer,
    StudyScalarFiles studyScalarFiles)
{
    var content = await GetCombinedStudyScalarDataCsv(root, writer, studyScalarFiles);
    if (content != null)
    {
        // content is already a string — the previous .ToString() call was redundant.
        var bytes = Encoding.UTF8.GetBytes(content);
        await writer.WriteNewFile(root, string.Empty, "scalar-results-merged.csv", bytes);
    }
}
/// <summary>
/// Processes every file under the root: collects study scalar results,
/// optionally diverts channel files into CSV columns, writes the remaining
/// files through the writer in parallel, then emits the channel CSVs and the
/// merged scalar-results CSV.
/// </summary>
/// <param name="root">Root folder to scan.</param>
/// <param name="writer">Writer used to emit output files.</param>
/// <param name="channelsAsCsv">When true, channel files are merged into CSVs instead of being written as-is.</param>
/// <param name="deleteProcessedFiles">Whether merged channel source files are deleted afterwards.</param>
/// <param name="parallelism">Maximum degree of parallelism for file writes.</param>
public async Task ExecuteAsync(
    IRootFolder root, IFileWriter writer, bool channelsAsCsv, bool deleteProcessedFiles, int parallelism)
{
    var studyScalarFiles = new StudyScalarFiles();
    var channelDataColumns = new ChannelDataColumns();
    var filesToWrite = new List<IFile>();

    foreach (var file in await root.GetFilesAsync())
    {
        try
        {
            TryAddFileToStudyScalarResults.Execute(file, studyScalarFiles);

            if (channelsAsCsv && TryGetChannelMetadata.Execute(file, out var channelMetadata))
            {
                // Channel file: queue it for CSV merging rather than writing it out.
                channelDataColumns.Add(new CsvColumn(channelMetadata, file));
            }
            else
            {
                filesToWrite.Add(file);
            }
        }
        catch (Exception ex)
        {
            Console.WriteLine();
            Console.WriteLine("Failed to process file: " + file);
            Console.WriteLine(ex);

            // Abort requests must not be swallowed — rethrow after logging.
            if (ex is AbortProcessingException)
            {
                throw;
            }
        }
    }

    await filesToWrite.ForEachAsync(
        parallelism,
        async file => { await writer.WriteExistingFile(root, file); });

    await WriteChannelDataAsCsv.ExecuteAsync(root, writer, deleteProcessedFiles, parallelism, channelDataColumns);
    await WriteCombinedStudyScalarData.ExecuteAsync(root, writer, studyScalarFiles);
}
/// <summary>
/// Reads the "{simType}_VectorMetadata.csv" file (if present) and builds a
/// per-channel lookup of units, x-domain and point count. All errors — a
/// missing/unreadable file or a malformed row — are logged and ignored so
/// metadata problems never block processing; the lookup is simply left
/// empty or partial.
/// </summary>
/// <param name="baseDirectory">Folder tree the metadata file is read from.</param>
/// <param name="relativePathToFile">Relative path of the folder containing the file.</param>
/// <param name="simType">Sim-type prefix of the metadata file name.</param>
/// <returns>A result wrapping the channel-name to metadata-row lookup.</returns>
private static async Task<SimTypeMetadataResult> GetSimTypeMetadataAsync(
    IRootFolder baseDirectory, string relativePathToFile, string simType)
{
    var lookup = new Dictionary<string, SimTypeMetadataRow>();
    try
    {
        var text = await baseDirectory.GetContentAsTextAsync(
            relativePathToFile, simType + "_VectorMetadata.csv");
        if (text != null)
        {
            var lines = text.SplitLines();
            // Skip(1): the first line is the column-titles header row.
            foreach (var line in lines.Skip(1))
            {
                try
                {
                    // Expected columns: [0] channel name, [1] units,
                    // [3] points in channel, [4] x-domain. Short rows fall
                    // back to null / 0 for the missing fields.
                    var values = line.SplitCsvLine().ToList();
                    var units = values.Count >= 2 ? values[1].WithoutQuotes() : null;
                    var xDomain = values.Count >= 5 ? values[4].WithoutQuotes() : null;
                    var pointsInChannel = 0;
                    if (values.Count >= 4)
                    {
                        var pointsInChannelString = values[3].WithoutQuotes();
                        // Non-numeric values leave pointsInChannel at 0.
                        int.TryParse(pointsInChannelString, out pointsInChannel);
                    }
                    // Later rows for the same channel name overwrite earlier ones.
                    lookup[values[0].WithoutQuotes()] =
                        new SimTypeMetadataRow(units, xDomain, pointsInChannel);
                }
                catch (Exception t)
                {
                    // Ignore any errors.
                    Console.WriteLine(t);
                }
            }
        }
    }
    catch (Exception t)
    {
        // Ignore any errors.
        Console.WriteLine(t);
    }
    return(new SimTypeMetadataResult(lookup));
}
/// <summary>
/// Saves the BIDS-agnostic top-level files (dataset description, CHANGES,
/// README) when content was supplied. Existing files are never overwritten;
/// a skip warning is recorded instead.
/// </summary>
/// <param name="rootFolder">Folder providing the target paths and save operations.</param>
/// <param name="datasetDescription">Dataset description to save, or null to skip.</param>
/// <param name="changesFileContent">CHANGES file content to save, or null to skip.</param>
/// <param name="readmeFileContent">README file content to save, or null to skip.</param>
private void SaveBidsAgnosticFiles(IRootFolder rootFolder, DatasetDescription datasetDescription, string changesFileContent, string readmeFileContent)
{
    // Shared save-unless-present logic, previously triplicated inline.
    void SaveUnlessPresent(string filePath, Action save)
    {
        if (File.Exists(filePath))
        {
            Warnings.Add($"{WarningFileSkipped}{filePath}");
        }
        else
        {
            save();
        }
    }

    if (datasetDescription != null)
    {
        SaveUnlessPresent(
            rootFolder.DatasetDescriptionFilePath,
            () => rootFolder.SaveDatasetDescriptionFile(datasetDescription));
    }

    if (changesFileContent != null)
    {
        SaveUnlessPresent(
            rootFolder.ChangesFilePath,
            () => rootFolder.SaveChangesFile(changesFileContent));
    }

    if (readmeFileContent != null)
    {
        SaveUnlessPresent(
            rootFolder.ReadmeFilePath,
            () => rootFolder.SaveReadmeFile(readmeFileContent));
    }
}
/// <summary>
/// Creates the factory.
/// </summary>
/// <param name="rootFolder">Root folder.</param>
/// <param name="systemConfig">System configuration.</param>
public Factory(IRootFolder rootFolder, SystemConfig systemConfig)
{
    this.rootFolder = rootFolder;
    this.config = systemConfig;
}
/// <summary>
/// Deletes a processed file from the local disk. Completes synchronously.
/// </summary>
/// <param name="root">Root folder (unused by the local implementation).</param>
/// <param name="file">File whose full path is deleted.</param>
public Task DeleteProcessedFile(IRootFolder root, IFile file)
{
    File.Delete(file.FullPath);
    return Task.CompletedTask;
}
/// <summary>
/// Writes a new file directly into the local root folder. The local writer
/// does not support relative sub-paths, which is asserted up front.
/// </summary>
/// <param name="root">Root folder; must be a LocalFolder.</param>
/// <param name="relativePathToFile">Must be empty — asserted, not used.</param>
/// <param name="fileName">Name of the file to create.</param>
/// <param name="data">Bytes to write.</param>
public Task WriteNewFile(IRootFolder root, string relativePathToFile, string fileName, byte[] data)
{
    LocalFolder.AssertRelativePathNotSupplied(relativePathToFile);

    var targetPath = Path.Combine(((LocalFolder)root).FolderPath, fileName);
    File.WriteAllBytes(targetPath, data);

    return Task.CompletedTask;
}
/// <summary>
/// No-op for the local writer: an existing file is already on disk, so there
/// is nothing to write.
/// </summary>
public Task WriteExistingFile(IRootFolder root, IFile file) => Task.CompletedTask;
/// <summary>
/// Merges binary channel files into per-sim-type CSV result files.
/// Channels are grouped by sim type, then by folder, then by x-domain; each
/// group becomes one "{simType}_VectorResults_{xDomain}.csv" file with a
/// title row, channel-name row, units row and one data row per sample.
/// Optionally deletes the source channel files afterwards.
/// </summary>
/// <param name="root">Root folder the channel files are read from and the CSVs are written to.</param>
/// <param name="writer">Writer used to emit CSVs, report errors and delete processed files.</param>
/// <param name="deleteProcessedFiles">When true, source channel files are deleted after the CSV is written.</param>
/// <param name="parallelism">Maximum degree of parallelism when loading channel files.</param>
/// <param name="channelDataColumns">The channel columns discovered during the scan.</param>
public static async Task ExecuteAsync(
    IRootFolder root,
    IFileWriter writer,
    bool deleteProcessedFiles,
    int parallelism,
    ChannelDataColumns channelDataColumns)
{
    foreach (var simType in channelDataColumns.SimTypes)
    {
        var columns = channelDataColumns.GetColumns(simType);
        // One output file per source folder...
        var folderGroups = columns.GroupBy(v => v.File.RelativePathToFile);
        foreach (var folderGroup in folderGroups)
        {
            var relativePathToFile = folderGroup.Key;
            var metadata = await GetSimTypeMetadataAsync(root, relativePathToFile, simType);
            // ...and per x-domain within that folder.
            var xDomainGroups = folderGroup.GroupBy(v => metadata.GetChannelXDomain(v.Metadata.ChannelName));
            foreach (var xDomainGroup in xDomainGroups)
            {
                var xDomain = xDomainGroup.Key.Trim();
                var fileSuffix = "_" + (string.IsNullOrWhiteSpace(xDomain) ? "Unspecified" : xDomain);
                // ConcurrentQueue because columns are decoded in parallel below.
                var resolvedData = new ConcurrentQueue<ResolvedCsvColumn>();
                await xDomainGroup.ForEachAsync(
                    parallelism,
                    async column =>
                    {
                        try
                        {
                            var pointsInChannel = metadata.GetPointsInChannel(column.Metadata.ChannelName);
                            var buffer = await column.File.GetContentAsBytesAsync();
                            if (buffer.Length == pointsInChannel * 4)
                            {
                                // Buffer size matches the declared point count at
                                // 4 bytes/value: decode as 32-bit floats, widened
                                // to double.
                                var floatValues = new float[buffer.Length / sizeof(float)];
                                Buffer.BlockCopy(buffer, 0, floatValues, 0, buffer.Length);
                                var values = new double[floatValues.Length];
                                for (int i = 0; i < floatValues.Length; i++)
                                {
                                    values[i] = (double)floatValues[i];
                                }
                                resolvedData.Enqueue(
                                    new ResolvedCsvColumn(column.File, column.Metadata.ChannelName, values));
                            }
                            else
                            {
                                // Otherwise decode as 64-bit doubles.
                                var values = new double[buffer.Length / sizeof(double)];
                                Buffer.BlockCopy(buffer, 0, values, 0, buffer.Length);
                                resolvedData.Enqueue(
                                    new ResolvedCsvColumn(column.File, column.Metadata.ChannelName, values));
                            }
                        }
                        catch (Exception t)
                        {
                            // A bad channel file is reported but does not stop the
                            // rest of the group.
                            writer.ReportError(
                                "Failed to parse file: " + column.File.FullPath, t);
                        }
                    });
                var data = resolvedData.ToList();
                if (data.Count > 0)
                {
                    // Always put the primary channel (tRun) first for ATLAS
                    // compatibility; remaining channels are sorted by name.
                    // (Comment previously said "sLap", but the constant below
                    // is "tRun".)
                    const string AtlasPrimaryChannel = "tRun";
                    data.Sort((a, b) =>
                    {
                        if (a == b)
                        {
                            return(0);
                        }
                        if (a.ChannelName == AtlasPrimaryChannel)
                        {
                            return(-1);
                        }
                        if (b.ChannelName == AtlasPrimaryChannel)
                        {
                            return(1);
                        }
                        return(String.Compare(a.ChannelName, b.ChannelName, StringComparison.OrdinalIgnoreCase));
                    });
                    var maxDataLength = data.Select(v => v.Data.Length).Max();
                    var csv = new StringBuilder();
                    // Row 1: title; row 2: channel names; row 3: quoted units
                    // ("()" when a channel has no units).
                    csv.AppendLine(relativePathToFile + simType);
                    csv.AppendLine(string.Join(",", data.Select(v => v.ChannelName)));
                    csv.AppendLine(string.Join(",", data.Select(v =>
                    {
                        var units = metadata.GetChannelUnits(v.ChannelName);
                        if (string.IsNullOrWhiteSpace(units))
                        {
                            return("\"()\"");
                        }
                        return("\"" + units + "\"");
                    })));
                    // Data rows: shorter channels pad with empty cells up to the
                    // longest channel.
                    for (int i = 0; i < maxDataLength; i++)
                    {
                        csv.AppendLine(
                            string.Join(
                                ",",
                                data.Select(v => v.Data.Length > i
                                    ? v.Data[i].NumericOrNaN().ToString(CultureInfo.InvariantCulture)
                                    : "").ToList()));
                    }
                    var bytes = Encoding.UTF8.GetBytes(csv.ToString());
                    var fileName = simType + "_VectorResults" + fileSuffix + ".csv";
                    if (string.IsNullOrWhiteSpace(relativePathToFile))
                    {
                        Console.WriteLine($"Writing '{fileName}'.");
                    }
                    else
                    {
                        Console.WriteLine($"Writing '{fileName}' to '{relativePathToFile}'.");
                    }
                    await writer.WriteNewFile(
                        root,
                        relativePathToFile,
                        simType + "_VectorResults" + fileSuffix + ".csv",
                        bytes);
                    if (deleteProcessedFiles)
                    {
                        foreach (var column in data)
                        {
                            await writer.DeleteProcessedFile(root, column.File);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Test setup: creates a root folder backed by a mock persistor and a child
/// folder to act as the parent under test.
/// </summary>
public void CreateParent()
{
    var persistor = ContentModelMockObjectFactory.CreateObjectPersistor();
    _root = new GenericRootFolder("Foo", persistor);
    _parent = new GenericFolder(_root, "Bar");
}
/// <summary>
/// Looks up a file-system object by path within the given root folder via an
/// object-container query. The match is activated before being returned;
/// null is returned when nothing matches.
/// </summary>
/// <param name="root">Root folder the object must belong to.</param>
/// <param name="path">Path to search for.</param>
/// <returns>The activated object, or null when no match exists.</returns>
public IFileSystemObject GetFileSystemObjectByPath(IRootFolder root, String path)
{
    //Search for an IFileSystemObject by path
    _logger.Debug("LogMsg.GettingFsoByPath", _path, root.Source, path);

    // Constrain on type, then on the Path and RootFolder fields.
    Query qry = _container.query();
    qry.constrain(typeof(IFileSystemObject));
    qry.descend("Path").constrain(path);
    qry.descend("RootFolder").constrain(root);

    ObjectSet set = qry.execute();
    if (!set.hasNext())
    {
        _logger.Debug("LogMsg.NotFoundFsoByPath", _path);
        return null;
    }

    _logger.Debug("LogMsg.FoundFsoByPath", _path);
    IFileSystemObject fso = (IFileSystemObject)set.next();
    fso.Activate();
    return fso;
}
/// <summary>
/// Test setup: creates a root folder backed by a mock persistor, a parent
/// folder, and a plain-text document inside it.
/// </summary>
public void CreateParent()
{
    var persistor = ContentModelMockObjectFactory.CreateObjectPersistor();
    _root = new GenericRootFolder("Foo", persistor);
    _parent = new GenericFolder(_root, "Bar");
    _doc = new GenericDocument(_parent, "Boo", "text/plain");
}
/// <summary>
/// File constructor.
/// </summary>
/// <param name="util">Utility helper.</param>
/// <param name="rootFolder">Root folder.</param>
/// <param name="shareManager">Share manager.</param>
public File(Util util, IRootFolder rootFolder, OCP.ShareNS.IManager shareManager)
{
    this.util = util;
    this.rootFolder = rootFolder;
    this.shareManager = shareManager;
}
/// <summary>
/// Test setup: creates a root folder backed by a mock persistor.
/// </summary>
public void CreateRoot()
{
    var persistor = ContentModelMockObjectFactory.CreateObjectPersistor();
    _root = new GenericRootFolder("Foo", persistor);
}