/// <summary>
/// Writes a copy of this Excellon file to <paramref name="outputfilename"/> containing only
/// the drills and slots that fall inside the given boundary polygon.
/// </summary>
/// <param name="boundary">Polygon used as the clipping region.</param>
/// <param name="outputfilename">Path of the Excellon file to write.</param>
/// <param name="log">Progress logger (currently unused by this method).</param>
private void WriteContained(PolyLine boundary, string outputfilename, IProgressLog log)
{
    var result = new ExcellonFile();
    foreach (var toolEntry in Tools)
    {
        var filteredTool = new ExcellonTool() { ID = toolEntry.Value.ID, Radius = toolEntry.Value.Radius };
        result.Tools[toolEntry.Key] = filteredTool;
        foreach (var drill in toolEntry.Value.Drills)
        {
            if (boundary.PointInPoly(new PointD(drill.X, drill.Y)))
            {
                filteredTool.Drills.Add(drill);
            }
        }
        foreach (var slot in toolEntry.Value.Slots)
        {
            // A slot is kept when either of its endpoints lies inside the boundary.
            if (boundary.PointInPoly(slot.Start) || boundary.PointInPoly(slot.End))
            {
                filteredTool.Slots.Add(slot);
            }
        }
    }
    result.Write(outputfilename, 0, 0, 0, 0);
}
/// <summary>
/// Creates a processor for running git/GitHub operations on behalf of a worker job.
/// </summary>
/// <param name="configuration">Worker configuration settings.</param>
/// <param name="progressLog">Logger used to report job progress.</param>
/// <param name="gitHubClientFactory">Factory for GitHub API clients.</param>
/// <param name="gitWrapperFactory">Factory for local git wrappers.</param>
public GitCommandProcessor(WorkerConfiguration configuration, IProgressLog progressLog, IGitHubClientFactory gitHubClientFactory, IGitWrapperFactory gitWrapperFactory)
{
    _gitWrapperFactory = gitWrapperFactory;
    _gitHubClientFactory = gitHubClientFactory;
    ProgressLog = progressLog;
    Configuration = configuration;
}
/// <summary>
/// Factory method for an <see cref="RdfFileGenerator"/> wired with the supplied
/// resource mapper, graph filter and progress logging.
/// </summary>
public ITripleCollectionHandler MakeRdfFileGenerator(IResourceFileMapper resourceMap, IEnumerable<Uri> graphFilter, IProgressLog progressLog, int reportInterval)
{
    var generator = new RdfFileGenerator(resourceMap, graphFilter, progressLog, reportInterval);
    return generator;
}
/// <summary>
/// Creates a generator that writes per-resource RDF (NQuads) files.
/// </summary>
/// <param name="resourceMap">Maps resource URIs to output file paths.</param>
/// <param name="graphFilter">Graphs to include; null or empty means no filtering.</param>
/// <param name="progressLog">Logger used to report progress.</param>
/// <param name="reportInterval">How often (in resources) progress is reported.</param>
public RdfFileGenerator(IResourceFileMapper resourceMap, IEnumerable<Uri> graphFilter, IProgressLog progressLog, int reportInterval)
{
    _resourceMap = resourceMap;
    _progressLog = progressLog;
    _reportInterval = reportInterval;
    _nquadsFormatter = new NQuads11Formatter();
    // A null filter is treated the same as an empty one: no filtering at all.
    _graphFilter = graphFilter == null ? new List<Uri>(0) : graphFilter.ToList();
    _noFilter = _graphFilter.Count == 0;
}
/// <summary>
/// Factory method for an <see cref="HtmlFileGenerator"/> wired with the supplied
/// services, mapper, view engine, progress logging and template variables.
/// </summary>
public IResourceStatementHandler MakeHtmlFileGenerator(IDataDockUriService uriService, IResourceFileMapper resourceMap, IViewEngine viewEngine, IProgressLog progressLog, int reportInterval, Dictionary<string, object> addVariables)
{
    var generator = new HtmlFileGenerator(uriService, resourceMap, viewEngine, progressLog, reportInterval, addVariables);
    return generator;
}
/// <summary>
/// Creates a generator that renders per-resource static HTML pages.
/// </summary>
/// <param name="uriService">Generates URIs for DataDock resources.</param>
/// <param name="resourceMap">Maps resource URIs to output file paths.</param>
/// <param name="viewEngine">Template engine used to render the pages.</param>
/// <param name="progressLog">Logger used to report progress.</param>
/// <param name="reportInterval">How often (in files) progress is reported.</param>
/// <param name="addVariables">Extra template variables; null becomes an empty dictionary.</param>
public HtmlFileGenerator(IDataDockUriService uriService, IResourceFileMapper resourceMap, IViewEngine viewEngine, IProgressLog progressLog, int reportInterval, Dictionary<string, object> addVariables)
{
    _uriService = uriService;
    _resourceMap = resourceMap;
    _viewEngine = viewEngine;
    _progressLog = progressLog;
    _reportInterval = reportInterval;
    _addVariables = addVariables == null ? new Dictionary<string, object>() : addVariables;
    // Generation counter starts from zero for each new generator instance.
    _numFilesGenerated = 0;
}
/// <summary>
/// Creates a generator for the VoID dataset description page of a repository.
/// </summary>
/// <param name="viewEngine">Template engine used to render the page.</param>
/// <param name="quinceStore">Quince store holding the repository's triples.</param>
/// <param name="repositoryUri">Base IRI of the repository.</param>
/// <param name="progressLog">Logger used to report progress.</param>
/// <param name="addVariables">Extra template variables; null becomes an empty dictionary.</param>
public VoidFileGenerator(IViewEngine viewEngine, IQuinceStore quinceStore, Uri repositoryUri, IProgressLog progressLog, Dictionary<string, object> addVariables)
{
    _viewEngine = viewEngine;
    _quinceStore = quinceStore;
    _repositoryUri = repositoryUri;
    _progressLog = progressLog;
    _addVariables = addVariables == null ? new Dictionary<string, object>() : addVariables;
    // The graph must exist before nodes can be created in it.
    _graph = new Graph();
    _voidSubset = _graph.CreateUriNode(new Uri("http://rdfs.org/ns/void#subset"));
    _dctermsPublisher = _graph.CreateUriNode(new Uri("http://purl.org/dc/terms/publisher"));
}
/// <summary>
/// Looks up a previously registered stream by name and adds it to the board set;
/// logs an error through <paramref name="Logger"/> if no stream was registered
/// under that name.
/// </summary>
/// <param name="aname">Name the stream was registered under.</param>
/// <param name="Logger">Progress logger used for the error report.</param>
/// <param name="drillscaler">Scale factor applied to drill coordinates (default 1.0).</param>
public void AddFileToSet(string aname, IProgressLog Logger, double drillscaler = 1.0)
{
    // TryGetValue avoids the double dictionary lookup of ContainsKey followed by the indexer.
    if (Streams.TryGetValue(aname, out var stream))
    {
        AddFileToSet(stream, aname, Logger, drillscaler);
    }
    else
    {
        Logger.AddString(String.Format("[ERROR] no stream for {0}!!!", aname));
    }
}
/// <summary>
/// Worker entry point: stores the job context, extracts the user's GitHub access
/// token from their claims, and runs the job. Fails with a
/// <see cref="WorkerException"/> when no token is available.
/// </summary>
/// <param name="jobInfo">The job to process.</param>
/// <param name="userInfo">Account of the user the job runs for.</param>
/// <param name="progressLog">Logger used to report job progress.</param>
public async Task ProcessJob(JobInfo jobInfo, UserAccount userInfo, IProgressLog progressLog)
{
    ProgressLog = progressLog;
    JobInfo = jobInfo;
    var authenticationClaim = userInfo.Claims.FirstOrDefault(c => c.Type.Equals(DataDockClaimTypes.GitHubAccessToken));
    AuthenticationToken = authenticationClaim?.Value;
    if (!string.IsNullOrEmpty(AuthenticationToken))
    {
        await RunJob(jobInfo, userInfo);
        return;
    }
    // No usable token: report through both logs and abort the job.
    Log.Error("No authentication token found for user {userId}", userInfo.UserId);
    ProgressLog.Error("Could not find a valid GitHub access token for this user account. Please check your account settings.");
    throw new WorkerException("Could not find a valid GitHub access token for this user account. Please check your account settings.");
}
/// <summary>
/// Worker entry point for a schema-deletion job: removes the schema record
/// identified by <c>job.SchemaId</c> from the backing store.
/// </summary>
/// <param name="job">The job carrying the schema id to delete.</param>
/// <param name="userAccount">Account of the user the job runs for (not used here).</param>
/// <param name="progressLog">Logger used to report job progress.</param>
/// <exception cref="WorkerException">Wraps any failure from the schema store.</exception>
public async Task ProcessJob(JobInfo job, UserAccount userAccount, IProgressLog progressLog)
{
    // Delete the schema from documentDB
    try
    {
        progressLog.UpdateStatus(JobStatus.Running, $"Deleting schema {job.SchemaId}");
        // NOTE(review): the first argument is passed as null here — presumably an
        // owner/partition id that DeleteSchemaAsync tolerates being null. Confirm
        // against the store implementation.
        await _schemaStore.DeleteSchemaAsync(null, job.SchemaId);
        progressLog.UpdateStatus(JobStatus.Running, "Schema deleted successfully");
    }
    catch (Exception ex)
    {
        // Report to both the job progress log and the application log before failing the job.
        progressLog.Error("Failed to remove schema record");
        Log.Error(ex, "Failed to remove schema record");
        throw new WorkerException(ex, "Failed to delete schema record.");
    }
}
/// <summary>
/// Merges any number of Excellon drill files into a single output file.
/// Two or more inputs are merged via <see cref="MultiMerge"/>; a single input
/// degenerates to a file copy; zero inputs only prints a warning.
/// </summary>
/// <param name="Files">Paths of the Excellon files to merge.</param>
/// <param name="output">Path of the merged output file.</param>
/// <param name="log">Progress logger.</param>
public static void MergeAll(List<string> Files, string output, IProgressLog log)
{
    if (Files.Count >= 2)
    {
        MultiMerge(Files[0], Files.Skip(1).ToList(), output, log);
        return;
    }
    if (Files.Count == 1)
    {
        Console.WriteLine("Merging 1 file is copying... doing so...");
        // File.Copy refuses to overwrite, so remove any existing output first.
        if (File.Exists(output))
        {
            File.Delete(output);
        }
        File.Copy(Files[0], output);
    }
    else
    {
        Console.WriteLine("Need files to do anything??");
    }
    // NOTE: the pairwise Merge/tempfile loop that used to follow here was unreachable
    // dead code (the Count >= 2 and Count < 2 branches above both returned) and has
    // been removed.
}
/// <summary>
/// Creates a repository object for updating the local clone of a DataDock GitHub repository.
/// </summary>
/// <param name="targetDirectory">Path to the directory containing the local clone.</param>
/// <param name="repositoryUri">Base IRI for DataDock graphs in this repository.</param>
/// <param name="progressLog">Progress logger to report to.</param>
/// <param name="quinceStoreFactory">Factory that creates the <c>IQuinceStore</c> for the clone's Quince store.</param>
/// <param name="fileFileGeneratorFactory">Factory creating the <see cref="IFileGeneratorFactory"/> used to generate the statically published HTML files.</param>
/// <param name="rdfResourceFileMapper">Maps resource URIs to the static RDF file paths for those resources.</param>
/// <param name="htmlResourceFileMapper">Maps resource URIs to the static HTML file paths for those resources.</param>
/// <param name="uriService">Generates URIs for DataDock resources.</param>
public DataDockRepository(
    string targetDirectory,
    Uri repositoryUri,
    IProgressLog progressLog,
    IQuinceStoreFactory quinceStoreFactory,
    IFileGeneratorFactory fileFileGeneratorFactory,
    IResourceFileMapper rdfResourceFileMapper,
    IResourceFileMapper htmlResourceFileMapper,
    IDataDockUriService uriService)
{
    _uriService = uriService;
    _htmlResourceFileMapper = htmlResourceFileMapper;
    _rdfResourceFileMapper = rdfResourceFileMapper;
    _fileGeneratorFactory = fileFileGeneratorFactory;
    _progressLog = progressLog;
    _repositoryUri = repositoryUri;
    _targetDirectory = targetDirectory;
    // The Quince store is opened on the clone directory immediately.
    _quinceStore = quinceStoreFactory.MakeQuinceStore(targetDirectory);
}
/// <summary>
/// Verifies that every drill file's bounding box intersects the combined bounding box
/// of all non-drill layers. Drill files that do not touch it are reported as errors
/// and dropped from the set. Finally recomputes <c>BoundingBox</c> from the files
/// that remain.
/// </summary>
/// <param name="Logger">Optional progress logger; may be null.</param>
public void CheckRelativeBoundingBoxes(IProgressLog Logger)
{
    List<ParsedGerber> DrillFiles = new List<ParsedGerber>();
    Bounds BB = new Bounds();
    // Separate drill layers from everything else; the non-drill layers define the
    // reference bounding box.
    foreach (var a in PLSs)
    {
        if (a.Layer == BoardLayer.Drill)
        {
            DrillFiles.Add(a);
        }
        else
        {
            BB.AddBox(a.BoundingBox);
        }
    }
    // Iterating the DrillFiles snapshot makes it safe to remove entries from PLSs.
    foreach (var a in DrillFiles)
    {
        if (a.BoundingBox.Intersects(BB) == false)
        {
            // Build the message once instead of formatting the same string twice.
            string msg = String.Format("Drill file {0} does not seem to touch the main bounding box!", Path.GetFileName(a.Name));
            Errors.Add(msg);
            if (Logger != null)
            {
                Logger.AddString(msg);
            }
            PLSs.Remove(a);
        }
    }
    // Rebuild the overall bounding box from the surviving files.
    BoundingBox = new Bounds();
    foreach (var a in PLSs)
    {
        BoundingBox.AddBox(a.BoundingBox);
    }
}
/// <summary>
/// Merges two Excellon files into one: the second file's tools are renumbered past
/// the first file's highest tool ID and appended, then the combined file is written.
/// Missing input files abort the operation with a console message.
/// </summary>
/// <param name="file1">First input file path.</param>
/// <param name="file2">Second input file path.</param>
/// <param name="outputfile">Path of the merged output file.</param>
/// <param name="log">Progress logger.</param>
public static void Merge(string file1, string file2, string outputfile, IProgressLog log)
{
    if (!File.Exists(file1))
    {
        Console.WriteLine("{0} not found! stopping process!", file1);
        return;
    }
    if (!File.Exists(file2))
    {
        Console.WriteLine("{0} not found! stopping process!", file2);
        return;
    }
    log.AddString(String.Format("*** Merging {0} with {1}", file1, file2));
    Console.WriteLine("*** Reading {0}:", file1);
    var first = new ExcellonFile();
    first.Load(file1);
    Console.WriteLine("*** Reading {0}:", file2);
    var second = new ExcellonFile();
    second.Load(file2);
    // Offset shifts the second file's tool IDs above the first file's range.
    int idOffset = 0;
    foreach (var tool in first.Tools)
    {
        if (tool.Value.ID > idOffset)
        {
            idOffset = tool.Value.ID + 1;
        }
    }
    foreach (var tool in second.Tools)
    {
        tool.Value.ID += idOffset;
        first.Tools[tool.Value.ID] = tool.Value;
    }
    first.Write(outputfile, 0, 0, 0, 0);
}
/// <summary>
/// Registers a named in-memory gerber/drill stream and adds it to the board set.
/// Outline files — and mill files when no outline has been loaded yet — are added
/// with zero trace width and pre-combined.
/// </summary>
/// <param name="MS">Stream holding the file contents.</param>
/// <param name="aname">File name used to classify board side and layer.</param>
/// <param name="Logger">Progress logger (not used directly here).</param>
/// <param name="drillscaler">Scale factor applied to drill coordinates (default 1.0).</param>
public void AddFileToSet(MemoryStream MS, string aname, IProgressLog Logger, double drillscaler = 1.0)
{
    Streams[aname] = MS;
    bool zerowidth = false;
    bool precombine = false;
    BoardSide aSide;
    BoardLayer aLayer;
    Gerber.DetermineBoardSideAndLayer(aname, out aSide, out aLayer);
    // Mill files stand in for the outline only as long as no real outline was loaded.
    if (aLayer == BoardLayer.Outline || (aLayer == BoardLayer.Mill && HasLoadedOutline == false))
    {
        zerowidth = true;
        precombine = true;
    }
    AddBoardToSet(MS, aname, zerowidth, precombine, drillscaler);
}
/// <summary>
/// Builds an <see cref="IDataDockRepository"/> configured for the given job, with RDF
/// data files mapped under "data" and HTML pages under "page" inside the job's local
/// repository directory.
/// </summary>
/// <param name="jobInfo">Job providing owner, repository and job identifiers.</param>
/// <param name="progressLog">Progress logger passed through to the repository.</param>
public IDataDockRepository GetRepositoryForJob(JobInfo jobInfo, IProgressLog progressLog)
{
    var repoPath = Path.Combine(_config.RepoBaseDir, jobInfo.JobId);
    var baseIri = new Uri(_uriService.GetRepositoryUri(jobInfo.OwnerId, jobInfo.RepositoryId));
    var resourceBaseIri = new Uri(_uriService.GetIdentifierPrefix(jobInfo.OwnerId, jobInfo.RepositoryId));
    var dataPath = Path.Combine(repoPath, "data");
    var pagePath = Path.Combine(repoPath, "page");
    var rdfResourceFileMapper = new ResourceFileMapper(new ResourceMapEntry(resourceBaseIri, dataPath));
    var htmlResourceFileMapper = new ResourceFileMapper(new ResourceMapEntry(resourceBaseIri, pagePath));
    return new DataDockRepository(repoPath, baseIri, progressLog, _quinceStoreFactory, _fileGeneratorFactory, rdfResourceFileMapper, htmlResourceFileMapper, _uriService);
}
/// <summary>
/// Merges multiple position files into one and writes the result. The output format
/// is chosen from the output file's extension: ".csv" (any casing) writes CSV,
/// anything else writes KiCad format.
/// </summary>
/// <param name="files">Paths of the position files to merge.</param>
/// <param name="output">Path of the merged output file.</param>
/// <param name="log">Progress logger.</param>
public static void MergeAll(List<string> files, string output, IProgressLog log)
{
    PositionFile result = new PositionFile();
    foreach (string fileName in files)
    {
        PositionFile posFile = new PositionFile();
        log.AddString(String.Format("Reading {0}", fileName));
        posFile.Load(fileName);
        log.AddString(String.Format("Merging {0}", fileName));
        result.Merge(posFile);
    }
    log.AddString(String.Format("Writing {0}", output));
    // Ordinal case-insensitive so ".CSV" is also written as CSV instead of silently
    // falling through to the KiCad writer.
    if (output.EndsWith(".csv", StringComparison.OrdinalIgnoreCase))
    {
        result.WriteCsv(output);
    }
    else
    {
        result.WriteKicad(output);
    }
}
/// <summary>
/// Merges multiple BOM files into one and writes the combined result as CSV.
/// The first file becomes the accumulator; subsequent files are merged into it.
/// </summary>
/// <param name="files">Paths of the BOM files to merge.</param>
/// <param name="output">Path of the merged CSV output file.</param>
/// <param name="log">Progress logger.</param>
public static void MergeAll(List<string> files, string output, IProgressLog log)
{
    BOMFile result = null;
    foreach (string fileName in files)
    {
        BOMFile bomFile = new BOMFile();
        log.AddString(String.Format("Reading {0}", fileName));
        bomFile.Load(fileName);
        log.AddString(String.Format("Merging {0}", fileName));
        if (result == null)
        {
            result = bomFile;
        }
        else
        {
            result.Merge(bomFile);
        }
    }
    // Guard: with no input files there is nothing to write. The original code
    // would have thrown a NullReferenceException on result.WriteCsv here.
    if (result == null)
    {
        log.AddString("No input files to merge - nothing written");
        return;
    }
    log.AddString(String.Format("Writing {0}", output));
    result.WriteCsv(output);
}
/// <summary>
/// Renders a two-line progress display at the given console row: a
/// "NN% description" text line followed by a green progress bar sized from the
/// estimated completion percentage. Restores the cursor to the starting row when done.
/// </summary>
/// <param name="progressLog">Source of the percentage and description; must not be null.</param>
/// <param name="cursorTop">Console row at which the display starts.</param>
private static void ShowProgressDescriptionAndProgressBar(IProgressLog progressLog, int cursorTop)
{
    Debug.Assert(progressLog != null);
    Console.CursorTop = cursorTop;
    var windowWidth = Console.WindowWidth - 1 /* -1 to avoid shaking effect when Console has a small width */;
    // Show progress description
    Console.ForegroundColor = ConsoleColor.White;
    Console.BackgroundColor = ConsoleColor.Black;
    Console.CursorTop = cursorTop;
    Console.CursorLeft = 0;
    var description = progressLog.EstimatedPercentageDone + "% " + progressLog.Description;
    if (description.Length > windowWidth)
    {
        // Truncate so the description never wraps onto the progress-bar line.
        description = description.Substring(0, windowWidth);
    }
    Console.Write(description);
    if (description.Length < windowWidth)
    {
        // Pad with spaces so leftovers from a longer previous description are erased.
        Console.Write(new string(' ', windowWidth - description.Length));
    }
    // Show progress bar
    Console.CursorTop = cursorTop + 1;
    Console.CursorLeft = 0;
    // NOTE(review): assumes EstimatedPercentageDone stays within 0..100; a larger
    // value would make the bar wider than the window — confirm the producer clamps it.
    var progressBarWidth = progressLog.EstimatedPercentageDone * windowWidth / 100;
    Console.BackgroundColor = ConsoleColor.Green;
    Console.Write(new string(' ', progressBarWidth));
    Console.BackgroundColor = ConsoleColor.DarkGray;
    // Console.Write(new string(' ', windowWidth - progressBarWidth));
    Console.BackgroundColor = ConsoleColor.Black;
    Console.CursorTop = cursorTop;
}
/// <summary>
/// Merges one Excellon file with a list of others and writes the combined result.
/// All inputs are verified to exist before any parsing starts; tools from the other
/// files are merged into the first via <c>AddToolWithHoles</c>.
/// </summary>
/// <param name="file1">First input file path; becomes the merge accumulator.</param>
/// <param name="otherfiles">Paths of the remaining input files.</param>
/// <param name="output">Path of the merged output file.</param>
/// <param name="log">Progress logger (not used directly here).</param>
private static void MultiMerge(string file1, List<string> otherfiles, string output, IProgressLog log)
{
    if (File.Exists(file1) == false)
    {
        Console.WriteLine("{0} not found! stopping process!", file1);
        return;
    }
    foreach (var otherfile in otherfiles)
    {
        if (File.Exists(otherfile) == false)
        {
            Console.WriteLine("{0} not found! stopping process!", otherfile);
            return;
        }
    }
    Console.WriteLine("*** Reading {0}:", file1);
    ExcellonFile file1Parsed = new ExcellonFile();
    file1Parsed.Load(file1);
    List<ExcellonFile> otherFilesParsed = new List<ExcellonFile>();
    foreach (var otherfile in otherfiles)
    {
        Console.WriteLine("*** Reading {0}:", otherfile);
        ExcellonFile otherFileParsed = new ExcellonFile();
        otherFileParsed.Load(otherfile);
        otherFilesParsed.Add(otherFileParsed);
    }
    // The maxID computation that used to sit here was dead code: it only fed the
    // commented-out manual ID-offsetting path, which AddToolWithHoles replaced.
    foreach (var F in otherFilesParsed)
    {
        foreach (var D in F.Tools)
        {
            file1Parsed.AddToolWithHoles(D.Value);
        }
    }
    file1Parsed.Write(output, 0, 0, 0, 0);
}
/// <summary>
/// Works around Eagle drill-export scaling problems: drill files whose bounding box
/// is far larger than the combined non-drill layers are removed and reloaded with a
/// power-of-ten scale factor until they roughly match; files whose size ratio is
/// degenerate are dropped. Finally rebuilds <c>BoundingBox</c> from the set.
/// </summary>
/// <param name="Logger">Optional progress logger; may be null.</param>
public void FixEagleDrillExportIssues(IProgressLog Logger)
{
    List<ParsedGerber> DrillFiles = new List<ParsedGerber>();
    // Each tuple holds (size ratio relative to the board, the oversized drill file).
    List<Tuple<double, ParsedGerber>> DrillFilesToReload = new List<Tuple<double, ParsedGerber>>();
    Bounds BB = new Bounds();
    foreach (var a in PLSs)
    {
        if (a.Layer == BoardLayer.Drill)
        {
            DrillFiles.Add(a);
        }
        else
        {
            // Non-drill layers define the reference bounding box.
            BB.AddBox(a.BoundingBox);
        }
    }
    foreach (var a in DrillFiles)
    {
        var b = a.BoundingBox;
        // A drill layer more than 1.5x wider or taller than the board is treated as mis-scaled.
        if (b.Width() > BB.Width() * 1.5 || b.Height() > BB.Height() * 1.5)
        {
            var MaxRatio = Math.Max(b.Width() / BB.Width(), b.Height() / BB.Height());
            if (Logger != null)
            {
                Logger.AddString(String.Format("Note: Really large drillfile found({0})-fix your export scripts!", a.Name));
            }
            Console.WriteLine("Note: Really large drillfile found ({0})- fix your export scripts!", a.Name);
            DrillFilesToReload.Add(new Tuple<double, ParsedGerber>(MaxRatio, a));
        }
    }
    foreach (var a in DrillFilesToReload)
    {
        PLSs.Remove(a.Item2);
        var scale = 1.0;
        if (Double.IsInfinity(a.Item1) || Double.IsNaN(a.Item1))
        {
            // Ratio is degenerate (e.g. a zero-sized reference box) — drop the file entirely.
            Errors.Add("Drill file size reached infinity - ignoring it");
            if (Logger != null)
            {
                Logger.AddString("Drill file size reached infinity - ignoring it");
            }
        }
        else
        {
            // Divide by powers of ten until the ratio falls under 1.5, then reload at that scale.
            var R = a.Item1;
            while (R >= 1.5)
            {
                R /= 10;
                scale /= 10;
            }
            AddFileToSet(a.Item2.Name, Logger, scale);
        }
    }
    // Rebuild the overall bounding box from whatever files remain in the set.
    BoundingBox = new Bounds();
    foreach (var a in PLSs)
    {
        BoundingBox.AddBox(a.BoundingBox);
    }
}
/// <summary>
/// Loads a list of gerber/drill files (plain files or zip archives) into the board set.
/// A first pass only classifies file names so that <c>HasLoadedOutline</c> is known
/// before anything is parsed; a second pass loads each file. Optionally runs a group
/// of consistency fix-ups afterwards.
/// </summary>
/// <param name="FileList">Paths of the files or zip archives to load.</param>
/// <param name="fixgroup">When true, runs post-load consistency checks and fix-ups.</param>
/// <param name="Logger">Optional progress logger; may be null.</param>
public void AddBoardsToSet(List<string> FileList, bool fixgroup = true, IProgressLog Logger = null)
{
    // Pass 1: classify every file (including zip members) without loading anything,
    // so outline detection does not depend on load order.
    foreach (var a in FileList)
    {
        BoardSide aSide = BoardSide.Unknown;
        BoardLayer aLayer = BoardLayer.Unknown;
        string ext = Path.GetExtension(a);
        if (ext == ".zip")
        {
            using (ZipFile zip1 = ZipFile.Read(a))
            {
                foreach (ZipEntry e in zip1)
                {
                    if (e.IsDirectory == false)
                    {
                        Gerber.DetermineBoardSideAndLayer(e.FileName, out aSide, out aLayer);
                        if (aLayer == BoardLayer.Outline)
                        {
                            HasLoadedOutline = true;
                        }
                    }
                }
            }
        }
        else
        {
            Gerber.DetermineBoardSideAndLayer(a, out aSide, out aLayer);
        }
        if (aLayer == BoardLayer.Outline)
        {
            HasLoadedOutline = true;
        }
    }
    // Pass 2: actually load each file; zip members are extracted into memory streams.
    foreach (var a in FileList)
    {
        if (Logger != null)
        {
            Logger.AddString(String.Format("Loading {0}", Path.GetFileName(a)));
        }
        string ext = Path.GetExtension(a);
        if (ext == ".zip")
        {
            using (ZipFile zip1 = ZipFile.Read(a))
            {
                foreach (ZipEntry e in zip1)
                {
                    if (e.IsDirectory == false)
                    {
                        if (Logger != null)
                        {
                            Logger.AddString(String.Format("Loading inside zip: {0}", Path.GetFileName(e.FileName)));
                        }
                        MemoryStream MS = new MemoryStream();
                        e.Extract(MS);
                        MS.Seek(0, SeekOrigin.Begin);
                        AddFileToSet(MS, e.FileName, Logger);
                    }
                }
            }
        }
        else
        {
            using (FileStream FS = File.OpenRead(a))
            {
                MemoryStream MS2 = new MemoryStream();
                FS.CopyTo(MS2);
                MS2.Seek(0, SeekOrigin.Begin);
                AddFileToSet(MS2, a, Logger);
            }
        }
    }
    if (fixgroup)
    {
        if (Logger != null)
        {
            Logger.AddString("Checking for common file format mistakes.");
        }
        FixEagleDrillExportIssues(Logger);
        CheckRelativeBoundingBoxes(Logger);
        CheckForOutlineFiles(Logger);
        // NOTE(review): CheckRelativeBoundingBoxes runs a second time here, presumably
        // to re-validate after CheckForOutlineFiles may have synthesized an outline —
        // confirm this repetition is intentional.
        CheckRelativeBoundingBoxes(Logger);
    }
}
/// <summary>
/// Checks whether the set contains an outline layer; when none exists, reports the
/// situation and synthesizes a bounding-box outline via <c>CreateBoxOutline</c>.
/// Files with unknown side and layer are reported as errors.
/// </summary>
/// <param name="Logger">Optional progress logger; may be null.</param>
public void CheckForOutlineFiles(IProgressLog Logger)
{
    List<ParsedGerber> Outlines = new List<ParsedGerber>();
    List<ParsedGerber> Unknowns = new List<ParsedGerber>();
    foreach (var a in PLSs)
    {
        if (a.Side == BoardSide.Both && (a.Layer == BoardLayer.Outline))
        {
            Outlines.Add(a);
        }
        if (a.Side == BoardSide.Unknown && a.Layer == BoardLayer.Unknown)
        {
            Unknowns.Add(a);
            Errors.Add(String.Format("Unknown file in set:{0}", Path.GetFileName(a.Name)));
            if (Logger != null)
            {
                Logger.AddString(String.Format("Unknown file in set:{0}", Path.GetFileName(a.Name)));
            }
        }
    }
    // NOTE: the original also collected BoardLayer.Mill files here, but that list was
    // only read by commented-out code; both the list and the dead code were removed.
    if (Outlines.Count == 0)
    {
        if (Unknowns.Count == 0)
        {
            // Every file is classified but none is an outline: report it and fall back
            // to a synthetic box outline.
            Errors.Add(String.Format("No outline file found and all other files accounted for! "));
            if (Logger != null)
            {
                Logger.AddString(String.Format("No outline file found and all other files accounted for! "));
            }
            CreateBoxOutline();
        }
        else
        {
            // Unknown files are present; still fall back to a synthetic box outline
            // rather than guessing which unknown file is the outline.
            CreateBoxOutline();
        }
    }
}
/// <summary>
/// Deletes the source CSV and CSV metadata directory for a dataset, reporting
/// progress, and rethrows any failure after logging it.
/// </summary>
/// <param name="baseDirectory">Root directory containing the "csv" folder.</param>
/// <param name="datasetId">Identifier of the dataset whose files are removed.</param>
/// <param name="progressLog">Logger used to report progress and failures.</param>
private static void DeleteCsvAndMetadata(string baseDirectory, string datasetId, IProgressLog progressLog)
{
    Log.Information("DeleteCsvAndMetadata: {baseDirectory}, {datasetId}", baseDirectory, datasetId);
    try
    {
        progressLog.Info("Deleting source CSV and CSV metadata files");
        var csvDirectory = Path.Combine(baseDirectory, "csv", datasetId);
        // Recursive delete removes the dataset's entire csv directory tree.
        Directory.Delete(csvDirectory, recursive: true);
    }
    catch (Exception ex)
    {
        // Surface the failure to the job log, then let the caller handle it.
        progressLog.Exception(ex, "Error deleting source CSV and CSV metadata files");
        throw;
    }
}
/// <summary>
/// Loads an Excellon file and writes out only the holes and slots contained within
/// the boundary polygon. A missing input file aborts with a console message.
/// </summary>
/// <param name="inputfile">Path of the Excellon file to clip.</param>
/// <param name="Boundary">Polygon used as the clipping region.</param>
/// <param name="outputfilename">Path of the clipped output file.</param>
/// <param name="Log">Progress logger.</param>
public static void WriteContainedOnly(string inputfile, PolyLine Boundary, string outputfilename, IProgressLog Log)
{
    if (!File.Exists(inputfile))
    {
        Console.WriteLine("{0} not found! stopping process!", Path.GetFileName(inputfile));
        return;
    }
    Log.AddString(String.Format("Clipping {0} to {1}", Path.GetFileName(inputfile), Path.GetFileName(outputfilename)));
    var excellon = new ExcellonFile();
    excellon.Load(inputfile);
    excellon.WriteContained(Boundary, outputfilename, Log);
}
/// <summary>
/// Worker entry point for a schema-creation job: retrieves the uploaded schema JSON
/// from temporary file storage, rewrites its publish URIs relative to the repository's
/// publish root, and saves a new schema record to the store. Any failure is wrapped
/// in a <see cref="WorkerException"/>.
/// </summary>
/// <param name="job">The job carrying owner, repository and schema-file identifiers.</param>
/// <param name="userAccount">Account of the user the job runs for (not used here).</param>
/// <param name="progressLog">Logger used to report job progress.</param>
public async Task ProcessJob(JobInfo job, UserAccount userAccount, IProgressLog progressLog)
{
    _progressLog = progressLog;
    // Save the schema to documentDB
    try
    {
        Log.Debug("Create schema. Schema file Id: {schemaFileId}", job.SchemaFileId);
        _progressLog.UpdateStatus(JobStatus.Running, "Create schema");
        // get schema from file store
        if (!string.IsNullOrEmpty(job.SchemaFileId))
        {
            // Parse the JSON metadata
            JObject schemaJson;
            var schemaFileStream = await _jobFileStore.GetFileAsync(job.SchemaFileId);
            using (var sr = new StreamReader(schemaFileStream))
            {
                using (var jr = new JsonTextReader(sr))
                {
                    schemaJson = JObject.Load(jr);
                }
            }
            if (schemaJson != null)
            {
                _progressLog.UpdateStatus(JobStatus.Running, "Retrieved DataDock schema file.");
                // Rewrite absolute URIs in the schema relative to the repository's publish
                // root, ensuring the base URL ends with exactly one trailing slash.
                MakeRelative(schemaJson, $"{_configuration.PublishUrl}{(_configuration.PublishUrl.EndsWith("/") ? string.Empty : "/")}{job.OwnerId}/{job.RepositoryId}/");
                Log.Debug("Create schema: OwnerId: {ownerId} RepositoryId: {repoId} SchemaFileId: {schemaFileId}", job.OwnerId, job.RepositoryId, job.SchemaFileId);
                // A fresh SchemaId is generated for every run of this job.
                var schemaInfo = new SchemaInfo
                {
                    OwnerId = job.OwnerId,
                    RepositoryId = job.RepositoryId,
                    LastModified = DateTime.UtcNow,
                    SchemaId = Guid.NewGuid().ToString(),
                    Schema = schemaJson,
                };
                _progressLog.UpdateStatus(JobStatus.Running, "Creating schema record.");
                await _schemaStore.CreateOrUpdateSchemaRecordAsync(schemaInfo);
                _progressLog.UpdateStatus(JobStatus.Running, "Schema record created successfully.");
            }
            else
            {
                _progressLog.UpdateStatus(JobStatus.Failed, "Unable to create schema - unable to retrieve schema JSON from temporary file storage");
                throw new WorkerException(
                    "Unable to create schema - unable to retrieve schema JSON from temporary file storage");
            }
        }
        else
        {
            _progressLog.UpdateStatus(JobStatus.Failed, "Unable to create schema - missing file Id");
            throw new WorkerException("Unable to create schema - missing file Id");
        }
    }
    catch (Exception ex)
    {
        // NOTE(review): WorkerExceptions thrown above are caught and re-wrapped here
        // with the generic "Failed to update schema record" message — confirm that
        // losing the more specific message is intended.
        Log.Error(ex, "Failed to update schema record");
        _progressLog.UpdateStatus(JobStatus.Failed, "Failed to update schema record");
        throw new WorkerException(ex, "Failed to update schema record.");
    }
}
/// <summary>
/// Factory method for a <see cref="GitCommandProcessor"/> bound to this factory's
/// configuration and client/wrapper factories.
/// </summary>
/// <param name="progressLog">Logger the processor will report progress to.</param>
public GitCommandProcessor MakeGitCommandProcessor(IProgressLog progressLog)
{
    var processor = new GitCommandProcessor(_config, progressLog, _clientFactory, _wrapperFactory);
    return processor;
}