public void AddFile(ProgressLog log, string L)
{
    // Classify the board file at path L and fold it into this set's totals:
    // drill files contribute to DrillCount, outline/mill gerbers extend Box.
    var T = GerberLibrary.Gerber.FindFileType(L);
    switch (T)
    {
        case GerberLibrary.Core.BoardFileType.Drill:
            {
                GerberLibrary.ExcellonFile EF = new GerberLibrary.ExcellonFile();
                EF.Load(log, L);
                DrillCount += EF.TotalDrillCount();
            }
            break;

        case GerberLibrary.Core.BoardFileType.Gerber:
            {
                GerberLibrary.Core.BoardSide Side;
                GerberLibrary.Core.BoardLayer Layer;
                GerberLibrary.Gerber.DetermineBoardSideAndLayer(L, out Side, out Layer);
                // Only outline/mill layers affect the bounding box. The
                // original also parsed every other gerber layer and discarded
                // the result - pure wasted work, removed here.
                if (Layer == GerberLibrary.Core.BoardLayer.Outline || Layer == GerberLibrary.Core.BoardLayer.Mill)
                {
                    var G = GerberLibrary.PolyLineSet.LoadGerberFile(L);
                    Box.AddBox(G.BoundingBox);
                }
            }
            break;
    }
}
// Works around Eagle CAD exports where a drill file is written at the wrong
// scale: drill layers whose bounding box dwarfs the rest of the board are
// detected, removed, and reloaded with a compensating power-of-ten scale.
public void FixEagleDrillExportIssues(ProgressLog Logger)
{
    List<ParsedGerber> DrillFiles = new List<ParsedGerber>();
    List<Tuple<double, ParsedGerber>> DrillFilesToReload = new List<Tuple<double, ParsedGerber>>();
    Bounds BB = new Bounds();

    // Split the parsed layers: collect drill files, and accumulate the
    // bounding box of everything else as the reference board extent.
    foreach (var a in PLSs)
    {
        if (a.Layer == BoardLayer.Drill)
        {
            DrillFiles.Add(a);
        }
        else
        {
            BB.AddBox(a.BoundingBox);
        }
    }

    // Flag drill files more than 1.5x wider or taller than the board itself,
    // remembering how far off they are (MaxRatio) for the rescale below.
    foreach (var a in DrillFiles)
    {
        var b = a.BoundingBox;
        if (b.Width() > BB.Width() * 1.5 || b.Height() > BB.Height() * 1.5)
        {
            var MaxRatio = Math.Max(b.Width() / BB.Width(), b.Height() / BB.Height());
            if (Logger != null) Logger.AddString(String.Format("Note: Really large drillfile found({0})-fix your export scripts!", a.Name));
            Console.WriteLine("Note: Really large drillfile found ({0})- fix your export scripts!", a.Name);
            DrillFilesToReload.Add(new Tuple<double, ParsedGerber>(MaxRatio, a));
        }
    }

    // Reload each oversized drill file, shrinking by factors of ten until the
    // size ratio drops below 1.5. Degenerate (infinite/NaN) ratios are logged
    // as errors and the file is dropped entirely.
    foreach (var a in DrillFilesToReload)
    {
        PLSs.Remove(a.Item2);
        var scale = 1.0;
        if (Double.IsInfinity(a.Item1) || Double.IsNaN(a.Item1))
        {
            Errors.Add("Drill file size reached infinity - ignoring it");
            if (Logger != null) Logger.AddString("Drill file size reached infinity - ignoring it");
        }
        else
        {
            var R = a.Item1;
            // Each factor-of-10 reduction of the ratio is mirrored in the
            // scale passed to the reload.
            while (R >= 1.5)
            {
                R /= 10;
                scale /= 10;
            }
            AddFileToSet(a.Item2.Name, Logger, scale);
        }
    }

    // Recompute the overall bounding box from the (possibly updated) set.
    BoundingBox = new Bounds();
    foreach (var a in PLSs)
    {
        //Console.WriteLine("Progress: Adding board {6} to box::{0:N2},{1:N2} - {2:N2},{3:N2} -> {4:N2},{5:N2}", a.BoundingBox.TopLeft.X, a.BoundingBox.TopLeft.Y, a.BoundingBox.BottomRight.X, a.BoundingBox.BottomRight.Y, a.BoundingBox.Width(), a.BoundingBox.Height(), Path.GetFileName( a.Name));
        //Console.WriteLine("adding box for {0}:{1},{2}", a.Name, a.BoundingBox.Width(), a.BoundingBox.Height());
        BoundingBox.AddBox(a.BoundingBox);
    }
}
// Deletes a dataset end-to-end: clone the repo, remove its CSV/metadata and
// RDF data, regenerate the static pages, commit and push, then drop the
// dataset record from the store. Throws WorkerException when the record
// removal fails after the repository has already been updated.
protected override async Task RunJob(JobInfo jobInfo, UserAccount userInfo)
{
    var targetDirectory = Path.Combine(Configuration.RepoBaseDir, jobInfo.JobId);
    Log.Information("Using local directory {localDirPath}", targetDirectory);
    Log.Information("Clone Repository: {gitRepositoryUrl} => {targetDirectory}", jobInfo.GitRepositoryUrl, targetDirectory);
    await _git.CloneRepository(jobInfo.GitRepositoryUrl, targetDirectory, AuthenticationToken, userInfo);

    var datasetIri = new Uri(jobInfo.DatasetIri);
    DeleteCsvAndMetadata(targetDirectory, jobInfo.DatasetId, ProgressLog);
    var dataDockRepository = _repositoryFactory.GetRepositoryForJob(jobInfo, ProgressLog);
    dataDockRepository.DeleteDataset(datasetIri);
    await UpdateHtmlPagesAsync(dataDockRepository, null);

    // Only push when the commit actually recorded changes.
    if (await _git.CommitChanges(targetDirectory, $"Deleted dataset {datasetIri}", userInfo))
    {
        await _git.PushChanges(jobInfo.GitRepositoryUrl, targetDirectory, AuthenticationToken);
    }

    try
    {
        await _datasetStore.DeleteDatasetAsync(jobInfo.OwnerId, jobInfo.RepositoryId, jobInfo.DatasetId);
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Failed to remove dataset record.");
        throw new WorkerException(ex, "Failed to remove dataset record. Your repository is updated but the dataset may still show in the main lodlab portal");
    }

    // BUGFIX: the message template previously read
    // "{OwnerId}/RepositoryId/{DatasetId}" - the middle placeholder was
    // missing its braces, so the RepositoryId argument was never rendered.
    Log.Information("Dataset Deleted: {OwnerId}/{RepositoryId}/{DatasetId}", jobInfo.OwnerId, jobInfo.RepositoryId, jobInfo.DatasetId);
    ProgressLog.DatasetDeleted(jobInfo.OwnerId, jobInfo.RepositoryId, jobInfo.DatasetId);
}
public void AddFile(ProgressLog log, MemoryStream L, string filename)
{
    // Detect the board file type from the stream contents, then either fold
    // the drill count into DrillCount or extend Box with the outline bounds.
    L.Seek(0, SeekOrigin.Begin);
    var fileType = GerberLibrary.Gerber.FindFileTypeFromStream(new StreamReader(L), filename);

    if (fileType == GerberLibrary.Core.BoardFileType.Drill)
    {
        var excellon = new GerberLibrary.ExcellonFile();
        L.Seek(0, SeekOrigin.Begin); // rewind: type detection consumed the stream
        excellon.Load(log, new StreamReader(L));
        DrillCount += excellon.TotalDrillCount();
    }
    else if (fileType == GerberLibrary.Core.BoardFileType.Gerber)
    {
        GerberLibrary.Core.BoardSide side;
        GerberLibrary.Core.BoardLayer layer;
        GerberLibrary.Gerber.DetermineBoardSideAndLayer(filename, out side, out layer);

        // Only outline/mill layers contribute to the bounding box.
        if (layer == GerberLibrary.Core.BoardLayer.Outline || layer == GerberLibrary.Core.BoardLayer.Mill)
        {
            L.Seek(0, SeekOrigin.Begin);
            var parsed = GerberLibrary.PolyLineSet.LoadGerberFileFromStream(new StreamReader(L), filename);
            Box.AddBox(parsed.BoundingBox);
        }
    }
}
// Accepts either a zip archive (each non-directory entry is forwarded
// individually) or a single board file; either way the content is handed to
// AddFileStream as an in-memory copy, scaled by drillscaler for drill files.
public void AddFile(ProgressLog log, string filename, double drillscaler = 1.0)
{
    string[] filesplit = filename.Split('.');
    string ext = filesplit[filesplit.Count() - 1].ToLower();
    if (ext == "zip")
    {
        using (ZipFile zip1 = ZipFile.Read(filename))
        {
            foreach (ZipEntry e in zip1)
            {
                if (e.IsDirectory == false)
                {
                    MemoryStream MS = new MemoryStream();
                    e.Extract(MS);
                    MS.Seek(0, SeekOrigin.Begin);
                    AddFileStream(log, MS, e.FileName, drillscaler);
                }
            }
        }
        return;
    }

    MemoryStream MS2 = new MemoryStream();
    // BUGFIX: the FileStream was never disposed, leaking the OS file handle
    // until finalization; "using" closes it deterministically.
    using (FileStream FS = File.OpenRead(filename))
    {
        FS.CopyTo(MS2);
    }
    MS2.Seek(0, SeekOrigin.Begin);
    AddFileStream(log, MS2, filename, drillscaler);
}
private void WriteContained(PolyLine boundary, string outputfilename, ProgressLog log)
{
    // Build a new Excellon file containing only the drills and slots that
    // fall inside the given boundary polygon, then write it out unshifted.
    var filtered = new ExcellonFile();
    foreach (var toolEntry in Tools)
    {
        var sourceTool = toolEntry.Value;
        var keptTool = new ExcellonTool() { ID = sourceTool.ID, Radius = sourceTool.Radius };
        filtered.Tools[toolEntry.Key] = keptTool;

        foreach (var drill in sourceTool.Drills)
        {
            if (boundary.PointInPoly(new PointD(drill.X, drill.Y)))
            {
                keptTool.Drills.Add(drill);
            }
        }

        // A slot is kept when either endpoint lies inside the boundary.
        foreach (var slot in sourceTool.Slots)
        {
            if (boundary.PointInPoly(slot.Start) || boundary.PointInPoly(slot.End))
            {
                keptTool.Slots.Add(slot);
            }
        }
    }
    filtered.Write(outputfilename, 0, 0, 0, 0);
}
// Copies the uploaded CSV (and its CSVW metadata, when present) from the job
// file store into csv/{datasetId} inside the cloned repository directory.
// Wraps any failure in a WorkerException with a user-facing message.
private async Task AddCsvFilesToRepository(string repositoryDirectory, string datasetId, string csvFileName, string csvFileId, string csvmFileId)
{
    try
    {
        ProgressLog.Info("Copying source CSV and metadata files to repository directory csv/{0}", datasetId);
        var datasetCsvDirPath = Path.Combine(repositoryDirectory, "csv", datasetId);
        if (!Directory.Exists(datasetCsvDirPath))
        {
            Directory.CreateDirectory(datasetCsvDirPath);
        }

        var csvFilePath = Path.Combine(datasetCsvDirPath, csvFileName);
        var csvFileStream = await _jobFileStore.GetFileAsync(csvFileId);
        await using (var csvOutStream = File.Open(csvFilePath, FileMode.Create, FileAccess.Write))
        {
            // BUGFIX: use CopyToAsync instead of the thread-blocking CopyTo
            // inside an async method.
            await csvFileStream.CopyToAsync(csvOutStream);
        }

        // The metadata file is optional; null id means none was uploaded.
        if (csvmFileId != null)
        {
            var csvmFilePath = csvFilePath + "-metadata.json";
            var csvmFileStream = await _jobFileStore.GetFileAsync(csvmFileId);
            await using var csvmOutStream = File.Open(csvmFilePath, FileMode.Create, FileAccess.Write);
            await csvmFileStream.CopyToAsync(csvmOutStream);
        }
    }
    catch (Exception ex)
    {
        Log.Error(ex, "Failed to copy CSV/CSVM files");
        throw new WorkerException(ex, "Failed to copy CSV/CSVM files from upload to Github repository.");
    }
}
private void RemapPair(ProgressLog log, string from, string to, bool overridevalue)
{
    // Merge BOM entry "from" into entry "to": move every RefDes across and
    // drop the source entry. When only "from" exists, rename it instead.
    log.PushActivity("RemapPair");
    if (from != to)
    {
        var source = FindEntry(from, overridevalue);
        var target = FindEntry(to, overridevalue);
        if (source != null)
        {
            if (target == null)
            {
                // No destination entry: keep the source but rebrand it.
                log.AddString(String.Format("From found, but no To: {0}", from));
                source.SetCombined(to);
            }
            else
            {
                foreach (var rd in source.RefDes)
                {
                    target.RefDes.Add(rd);
                }
                source.RefDes.Clear();
                RemoveEntry(source);
            }
        }
    }
    log.PopActivity();
}
// Look up the cached stream for this filename and delegate to the
// stream-based overload; returns null when no stream was registered.
public ParsedGerber AddBoardToSet(ProgressLog log, string _originalfilename, bool forcezerowidth = false, bool precombinepolygons = false, double drillscaler = 1.0)
{
    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    if (Streams.TryGetValue(_originalfilename, out var stream))
    {
        return(AddBoardToSet(log, stream, _originalfilename, forcezerowidth, precombinepolygons, drillscaler));
    }
    return(null);
}
public void Load(ProgressLog log, string filename, double drillscaler = 1.0)
{
    // Read an Excellon drill file from disk and parse it, wrapped in a log
    // activity so progress output nests correctly.
    var activity = log.PushActivity("Loading Excellon");
    List<string> lines = File.ReadAllLines(filename).ToList();
    ParseExcellon(lines, drillscaler, log);
    log.PopActivity(activity);
}
public void Fail(Stack<KeyValuePair<string, SequenceItemActions.ISequenceItemAction>> breadcrumbs, SequenceItem failWith)
{
    // Report a failed sequence item: reset the progress log, then emit the
    // failure line to both the structured logger and the console.
    ProgressLog.Clear();
    string message = $"{runid,padWidth} - Fail:{failWith.command}";
    Logger.Error(message);
    Console.WriteLine(message.Trim());
}
public void Success(SequenceItem successWith)
{
    // Report a completed sequence item: reset the progress log, then emit
    // the success line to both the structured logger and the console.
    ProgressLog.Clear();
    string message = $"{runid,padWidth} - Success:{successWith.command}";
    Logger.Info(message);
    Console.WriteLine(message.Trim());
}
// POST handler: remove the ProgressLog row with the given key, then return
// to the index view.
public ActionResult DeleteConfirmed(int id)
{
    ProgressLog progressLog = db.ProgressLogs.Find(id);
    // BUGFIX: Find returns null for an unknown id, and Remove(null) throws
    // ArgumentNullException; respond with 404 instead.
    if (progressLog == null)
    {
        return(HttpNotFound());
    }
    db.ProgressLogs.Remove(progressLog);
    db.SaveChanges();
    return(RedirectToAction("Index"));
}
// Parse one gerber or excellon file from an in-memory stream and add it to
// the Gerbers display list with the default sort order and colour for its
// detected layer/side. Unsupported file types are silently skipped.
public void AddFileStream(ProgressLog log, MemoryStream S, string origfilename, double drillscaler = 1.0)
{
    var FileType = Gerber.FindFileTypeFromStream(new StreamReader(S), origfilename);
    S.Seek(0, SeekOrigin.Begin); // rewind: type detection consumed the stream
    if (FileType == BoardFileType.Unsupported)
    {
        return;
    }
    ParsedGerber PLS;
    GerberParserState State = new GerberParserState()
    {
        PreCombinePolygons = false
    };
    if (FileType == BoardFileType.Drill)
    {
        if (Gerber.ExtremelyVerbose)
        {
            Console.WriteLine("Log: Drill file: {0}", origfilename);
        }
        PLS = PolyLineSet.LoadExcellonDrillFileFromStream(log, new StreamReader(S), origfilename, false, drillscaler);
        S.Seek(0, SeekOrigin.Begin);
        // ExcellonFile EF = new ExcellonFile();
        // EF.Load(a);
    }
    else
    {
        bool forcezerowidth = false;
        bool precombinepolygons = false;
        BoardSide Side = BoardSide.Unknown;
        BoardLayer Layer = BoardLayer.Unknown;
        Gerber.DetermineBoardSideAndLayer(origfilename, out Side, out Layer);
        // Outline/mill layers are loaded with zero-width strokes and
        // pre-combined polygons so they form closed board shapes.
        if (Layer == BoardLayer.Outline || Layer == BoardLayer.Mill)
        {
            forcezerowidth = true;
            precombinepolygons = true;
        }
        State.PreCombinePolygons = precombinepolygons;
        PLS = PolyLineSet.LoadGerberFileFromStream(log, new StreamReader(S), origfilename, forcezerowidth, false, State);
        S.Seek(0, SeekOrigin.Begin);
        PLS.Side = Side;
        PLS.Layer = Layer;
    }
    Gerbers.Add(new DisplayGerber()
    {
        File = PLS,
        visible = true,
        sortindex = Gerber.GetDefaultSortOrder(PLS.Side, PLS.Layer),
        Color = Colors.GetDefaultColor(PLS.Layer, PLS.Side)
    });
}
private Graph GenerateDefinitionsGraph(JObject metadataJson)
{
    // Build a fresh RDF graph holding the column property definitions pulled
    // out of the CSVW metadata document.
    var graph = new Graph();
    ProgressLog.Info("Extracting column property definitions");
    // NOTE(review): "MetdataExtractor" looks like a typo for MetadataExtractor,
    // but the class is declared elsewhere so the name must stay as-is here.
    var extractor = new MetdataExtractor();
    extractor.GenerateColumnDefinitions(metadataJson, graph);
    return(graph);
}
// Extends the base trainer constructor with progress logging: logCycle
// overrides the error-report interval when supplied, and logMethod receives
// the training output.
public NeuralNetTrainer(INeuralNet net, Tuple<double[], double[]>[] dataSets, int?epochCount, double?tolerableError, double?learnRate, int?logCycle, ProgressLog logMethod)
    : this(net, dataSets, epochCount, tolerableError, learnRate)
{
    // Only override the default write cycle when a value was provided.
    if (logCycle != null)
    {
        ErrorWriteCycle = logCycle.Value;
    }
    Log = logMethod;
}
// Runs the selected long-running action - export to one of the supported
// model formats, or a lightmap render - with UI feedback: errors and user
// cancellation are reported into the ProgressLog textbox via Invoke, since
// this runs off the UI thread (it catches ThreadAbortException).
private void PerformAction(bool export)
{
    try
    {
        SetCancelEnabled(true);
        if (export)
        {
            // Dispatch on the output file extension chosen in the save dialog.
            string extension = System.IO.Path.GetExtension(SaveFileName);
            if (extension.Equals(".rm2", StringComparison.OrdinalIgnoreCase))
            {
                RM2Export.SaveToFile(SaveFileName, Document, this);
            }
            else if (extension.Equals(".rmesh", StringComparison.OrdinalIgnoreCase))
            {
                RMeshExport.SaveToFile(SaveFileName, Document, this);
            }
            else if (extension.Equals(".fbx", StringComparison.OrdinalIgnoreCase) || extension.Equals(".obj", StringComparison.OrdinalIgnoreCase) || extension.Equals(".dae", StringComparison.OrdinalIgnoreCase) || extension.Equals(".stl", StringComparison.OrdinalIgnoreCase) || extension.Equals(".ply", StringComparison.OrdinalIgnoreCase))
            {
                // Generic exporter takes the format name without the leading dot.
                GenericExport.SaveToFile(SaveFileName, Document, this, extension.Substring(1));
            }
            else
            {
                throw new Exception($"Unknown file extension ({extension})");
            }
        }
        else
        {
            Lightmap.Lightmapper.Render(Document, this, out _, out _);
        }
    }
    catch (ThreadAbortException)
    {
        // User cancelled: abort any worker threads the lightmapper spawned.
        foreach (Thread thread in (Lightmap.Lightmapper.FaceRenderThreads ?? Enumerable.Empty <Thread>()))
        {
            if (thread.IsAlive)
            {
                thread.Abort();
            }
        }
        ProgressLog.Invoke((MethodInvoker)(() => ProgressLog.AppendText("\nCancelled by the user")));
        ProgressBar.Invoke((MethodInvoker)(() => ProgressBar.Value = 0));
    }
    catch (Exception e)
    {
        // Any other failure: append the error in red to the log textbox,
        // restoring the normal text colour afterwards.
        ProgressLog.Invoke((MethodInvoker)(() =>
        {
            ProgressLog.SelectionStart = ProgressLog.TextLength;
            ProgressLog.SelectionLength = 0;
            ProgressLog.SelectionColor = Color.Red;
            ProgressLog.AppendText("\nError: " + e.Message + "\n" + e.StackTrace);
            ProgressLog.SelectionColor = ProgressLog.ForeColor;
        }));
        ProgressBar.Invoke((MethodInvoker)(() => ProgressBar.Value = 0));
    }
    finally
    {
        SetCancelEnabled(false);
    }
}
public void Load(ProgressLog log, StreamReader stream, double drillscaler = 1.0)
{
    // Drain the reader line by line, then hand the collected lines to the
    // Excellon parser.
    var lines = new List<string>();
    for (string line = stream.ReadLine(); line != null; line = stream.ReadLine())
    {
        lines.Add(line);
    }
    ParseExcellon(lines, drillscaler, log);
}
// POST handler: persist the submitted changes when the model validates;
// otherwise redisplay the edit form with the posted values.
public ActionResult Edit([Bind(Include = "Id,ProgressId,ShiftId,PrimaryLine,CompletedDate,CompletedHour,Counts,EquivalentCount,CumulativeCount,Issue,Comment,EntDate")] ProgressLog progressLog)
{
    if (!ModelState.IsValid)
    {
        return(View(progressLog));
    }
    db.Entry(progressLog).State = EntityState.Modified;
    db.SaveChanges();
    return(RedirectToAction("Index"));
}
// Resolve the registered stream for this name and forward it to the
// stream-based overload; logs an error when the name was never registered.
public void AddFileToSet(string aname, ProgressLog Logger, double drillscaler = 1.0)
{
    // TryGetValue avoids the double dictionary lookup of ContainsKey + indexer.
    if (Streams.TryGetValue(aname, out var stream))
    {
        AddFileToSet(stream, aname, Logger, drillscaler);
    }
    else
    {
        Logger.AddString(String.Format("[ERROR] no stream for {0}!!!", aname));
    }
}
/// <summary>
/// Construct double array trie which is equivalent to input trie
/// </summary>
/// <param name="trie">normal trie, which contains all dictionary words</param>
public void Build(Trie trie)
{
    string mode = Compact ? "compact" : "sparse";
    ProgressLog.Begin($"building {mode} trie");

    // Fresh base/check/tail buffers; slot 0's base is seeded to 1.
    BaseBuffer = new int[BaseCheckInitialSize];
    BaseBuffer[0] = 1;
    CheckBuffer = new int[BaseCheckInitialSize];
    TailBuffer = new char[TailInitialSize];

    // Recursively insert everything reachable from the trie root.
    Add(-1, 0, trie.Root);
    ReportUtilizationRate();
    ProgressLog.End();
}
private static GerberImageCreator LoadGerberZip(string v, ProgressLog log)
{
    // Build a GerberImageCreator from a single zip of gerbers, wrapped in
    // its own log activity.
    log.PushActivity("LoadingGerbers");
    var creator = new GerberImageCreator();
    var inputs = new List<String> { v };
    creator.AddBoardsToSet(inputs, log, true, false);
    log.PopActivity();
    return(creator);
}
// Fallback for gerber sets without an outline layer: promote the largest
// closed polygon found on any layer to a synthetic outline board. Returns
// false when no polygon covers at least 1/3 of the bounding-box area.
private bool InventOutline(ProgressLog log)
{
    double largest = 0;
    ParsedGerber Largest = null;
    PolyLine Outline = null;
    // Find the single largest polygon across all parsed layers.
    foreach (var a in PLSs)
    {
        var P = a.FindLargestPolygon();
        if (P != null)
        {
            if (P.Item1 > largest)
            {
                largest = P.Item1;
                Largest = a;
                Outline = P.Item2;
            }
        }
    }
    // Too small relative to the whole board -> not a credible outline.
    if (largest < BoundingBox.Area() / 3.0)
    {
        return(false);
    }
    bool zerowidth = true;
    bool precombine = true;
    log.AddString(String.Format("Note: Using {0} to extract outline file", Path.GetFileName(Largest.Name)));
    // If the donor is a mill layer, remove the polygon there so it is not
    // drawn on two layers at once.
    if (Largest.Layer == BoardLayer.Mill)
    {
        Largest.OutlineShapes.Remove(Outline);
        Largest.Shapes.Remove(Outline);
    }
    // Reload the donor file as a fresh board and strip it down to just the
    // chosen polygon, retagged as an outline covering both sides.
    var b = AddBoardToSet(log, Largest.Name, zerowidth, precombine, 1.0);
    b.Layer = BoardLayer.Outline;
    b.Side = BoardSide.Both;
    b.DisplayShapes.Clear();
    b.OutlineShapes.Clear();
    b.Shapes.Clear();
    Outline.Close();
    b.Shapes.Add(Outline);
    b.OutlineShapes.Add(Outline);
    //b.DisplayShapes.Add(Outline);
    //b.BuildBoundary();
    b.FixPolygonWindings();
    b.CalcPathBounds();
    return(true);
}
// CLI helper: rotate and/or translate a drill or gerber file.
// Optional positional args: args[2]=dx, args[3]=dy, args[4]=cx, args[5]=cy,
// args[6]=angle. Missing or unparseable values silently default to 0.
private static void RotateFile(ProgressLog log, string filename, string outfile, string[] args)
{
    double dx = 0;
    double dy = 0;
    double cx = 0;
    double cy = 0;
    double angle = 0;
    if (args.Count() > 2)
    {
        double.TryParse(args[2], out dx);
    }
    if (args.Count() > 3)
    {
        double.TryParse(args[3], out dy);
    }
    if (args.Count() > 4)
    {
        double.TryParse(args[4], out cx);
    }
    if (args.Count() > 5)
    {
        double.TryParse(args[5], out cy);
    }
    if (args.Count() > 6)
    {
        double.TryParse(args[6], out angle);
    }
    var T = Gerber.FindFileType(filename);
    if (T == BoardFileType.Drill)
    {
        // Drill files go through the Excellon loader/writer.
        ExcellonFile EF = new ExcellonFile();
        EF.Load(log, filename);
        EF.Write(outfile, dx, dy, cx, cy, angle);
    }
    else
    {
        BoardSide Side;
        BoardLayer Layer;
        // NOTE(review): this branch reads args[0] in three places instead of
        // the "filename" parameter - presumably both hold the input path on
        // the command line, but verify against callers. Side/Layer are
        // computed here but never used afterwards.
        Gerber.DetermineBoardSideAndLayer(args[0], out Side, out Layer);
        GerberTransposer.Transform(log, filename, outfile, dx, dy, cx, cy, angle);
        // Writes a sanitized copy of the input next to it (debug aid), then
        // renders the transformed output to a PNG.
        var lines = PolyLineSet.SanitizeInputLines(System.IO.File.ReadAllLines(args[0]).ToList());
        System.IO.File.WriteAllLines(args[0] + "sanit.txt", lines);
        Gerber.SaveGerberFileToImage(log, outfile, outfile + "_render.png", 200, Color.Black, Color.White);
    }
}
void ReportUtilizationRate()
{
    // Count used (non-zero) base slots to report how densely the
    // double-array trie buffers are packed.
    var used = 0;
    for (int i = 0; i < MaxBaseCheckIndex; i++)
    {
        if (BaseBuffer[i] != 0)
        {
            used++;
        }
    }
    ProgressLog.Println("trie memory utilization ratio (" + (!Compact ? "not " : "") + "compacted): " + (used / (float)MaxBaseCheckIndex));
}
public static Bounds GetBoundingBox(ProgressLog log, List<string> generatedFiles)
{
    // Union the bounding boxes of all the given gerber files.
    var total = new Bounds();
    foreach (var path in generatedFiles)
    {
        var parsed = PolyLineSet.LoadGerberFile(log, path, State: new GerberParserState() { PreCombinePolygons = false });
        total.AddBox(parsed.BoundingBox);
    }
    return(total);
}
// GET: ProgressLogs/Edit/5
// Show the edit form for an existing ProgressLog: 400 when no id was
// supplied, 404 when the id matches no row.
public ActionResult Edit(int?id)
{
    if (!id.HasValue)
    {
        return(new HttpStatusCodeResult(HttpStatusCode.BadRequest));
    }
    ProgressLog progressLog = db.ProgressLogs.Find(id);
    if (progressLog == null)
    {
        return(HttpNotFound());
    }
    return(View(progressLog));
}
// Merge any number of Excellon files into one output file: two or more
// inputs are handed to MultiMerge, one input is simply copied, zero inputs
// just logs a complaint.
public static void MergeAll(List<string> Files, string output, ProgressLog Log)
{
    var LogDepth = Log.PushActivity("Excellon MergeAll");
    if (Files.Count >= 2)
    {
        MultiMerge(Files[0], Files.Skip(1).ToList(), output, Log);
        Log.PopActivity(LogDepth);
        return;
    }
    if (Files.Count == 1)
    {
        Log.AddString("Merging 1 file is copying... doing so...");
        if (File.Exists(output))
        {
            File.Delete(output);
        }
        File.Copy(Files[0], output);
    }
    else
    {
        Log.AddString("Need files to do anything??");
    }
    Log.PopActivity(LogDepth);
    // BUGFIX: the original pairwise Merge/tempfile fallback that followed
    // here was unreachable - every Files.Count value returns through one of
    // the branches above - so it has been removed.
}
// Grow the base/check buffers so that nextIndex fits, expanding by
// BufferGrowthPercentage of the current capacity beyond nextIndex.
void ExtendBuffers(int nextIndex)
{
    var newLength = nextIndex + (int)(BaseBuffer.Length * BufferGrowthPercentage);
    // BUGFIX: report the new capacity; the original logged BaseBuffer.Length
    // before resizing, i.e. the old size.
    ProgressLog.Println("Buffers extended to " + newLength + " entries");
    var tmp = BaseBuffer;
    Array.Resize(ref tmp, newLength);
    BaseBuffer = tmp;
    tmp = CheckBuffer;
    Array.Resize(ref tmp, newLength);
    CheckBuffer = tmp;
}
// Merge two Excellon drill files into one output file. Tool IDs from the
// second file are shifted by an offset derived from the highest tool ID in
// the first file so the two tool tables do not collide.
public static void Merge(string file1, string file2, string outputfile, ProgressLog Log)
{
    Log.PushActivity("Excellon Merge");
    // Bail out early (with a balanced PopActivity) when either input is missing.
    if (File.Exists(file1) == false)
    {
        Log.AddString(String.Format("{0} not found! stopping process!", file1));
        Log.PopActivity();
        return;
    }
    if (File.Exists(file2) == false)
    {
        Log.AddString(String.Format("{0} not found! stopping process!", file2));
        Log.PopActivity();
        return;
    }
    Log.AddString(String.Format("Reading {0}:", file1));
    ExcellonFile File1Parsed = new ExcellonFile();
    File1Parsed.Load(Log, file1);
    Log.AddString(String.Format("Reading {0}:", file2));
    ExcellonFile File2Parsed = new ExcellonFile();
    File2Parsed.Load(Log, file2);
    Log.AddString(String.Format("Merging {0} with {1}", file1, file2));
    // Offset for file2's IDs: bumped to ID+1 whenever a larger ID is seen.
    int MaxID = 0;
    foreach (var D in File1Parsed.Tools)
    {
        if (D.Value.ID > MaxID)
        {
            MaxID = D.Value.ID + 1;
        }
    }
    // Re-key file2's tools into file1's tool table under the shifted IDs.
    foreach (var D in File2Parsed.Tools)
    {
        D.Value.ID += MaxID;
        File1Parsed.Tools[D.Value.ID] = D.Value;
    }
    // Write the combined file with no translation/rotation applied.
    File1Parsed.Write(outputfile, 0, 0, 0, 0);
    Log.PopActivity();
}