/// <summary>
/// Removes the connector line bound to <paramref name="model"/> from the
/// canvas, after giving the optional <c>LineCheck</c> hook a chance to veto.
/// </summary>
/// <param name="model">The line model whose visual should be deleted.</param>
private void DeleteLine(MDF_ConnectLineModel model)
{
    if (canvasLine == null || model == null)
    {
        return;
    }

    // Locate the visual child whose DataContext is this model.
    MDF_Line target = null;
    foreach (var child in canvasLine.Children)
    {
        var context = (child as FrameworkElement).DataContext as MDF_ConnectLineModel;
        if (context == model)
        {
            target = child as MDF_Line;
            break;
        }
    }

    if (target == null)
    {
        return;
    }

    // Allow an external validator to reject the deletion.
    if (LineCheck != null && !LineCheck.DeleckLine(model))
    {
        return;
    }

    // Unsubscribe before removal to avoid a dangling handler reference.
    target.LineSelected -= line_LineSelected;
    canvasLine.Children.Remove(target);
    LineSource.Remove(model);
}
/// <summary>
/// Builds a frame descriptor for a source location.
/// </summary>
/// <param name="fileName">File the frame refers to.</param>
/// <param name="lineNumber">Line within the file; 0 when unknown.</param>
/// <param name="lineSource">How the line position was determined.</param>
/// <param name="method">Optional method name associated with the frame.</param>
public FrameModel(string fileName, int lineNumber = 0, LineSource lineSource = LineSource.FileLineNumber, string method = null)
{
    // Plain property capture; assignment order is not significant.
    FileName = fileName;
    LineSource = lineSource;
    LineNumber = lineNumber;
    Method = method;
}
/// <summary>
/// Creates a connector line between two connect items, registers its model
/// in <c>LineSource</c>, and refreshes the layout.
/// </summary>
/// <param name="from">Source endpoint; ignored when null.</param>
/// <param name="to">Target endpoint; ignored when null.</param>
private void DrawLine(MDF_ConnectItemModel from, MDF_ConnectItemModel to)
{
    if (from == null || to == null)
    {
        return;
    }

    var model = new MDF_ConnectLineModel { From = from.Key, To = to.Key };
    var line = CreateLine(model);
    if (line == null)
    {
        return;
    }

    ComputeLineInfo(line);

    // Lazily create the backing collection on first use.
    if (LineSource == null)
    {
        LineSource = new ObservableCollection<MDF_ConnectLineModel>();
    }
    // Use the line's own DataContext — CreateLine may have substituted it.
    LineSource.Add(line.DataContext as MDF_ConnectLineModel);

    SelectedItem = null;
    this.UpdateLayout();
}
/// <summary>
/// Lazily parses every line supplied by <c>LineSource</c>.
/// </summary>
/// <returns>One parsed field array per input line, streamed on demand.</returns>
public IEnumerable<string[]> Parse()
{
    // Deferred: each line is fetched and parsed only as the caller iterates.
    foreach (var rawLine in LineSource.GetLines())
    {
        yield return ParseLine(rawLine);
    }
}
/// <summary>
/// Imports every line from <paramref name="lineSource"/> into this
/// collection, attaching each line back to the source it came from.
/// </summary>
/// <param name="lineSource">The source whose lines are copied in.</param>
public void FromLineSource(LineSource lineSource)
{
    foreach (var sourcedLine in lineSource)
    {
        // Back-reference first, so the line knows its origin before insertion.
        sourcedLine.SetLineSource(lineSource);
        Add(sourcedLine);
    }
}
public void EmptyLineSourceIsEmpty()
{
    // Arrange: a source built from the empty string.
    var emptySource = new LineSource(string.Empty);

    // Assert: with nothing to read, the source starts out finished.
    emptySource.IsFinished().Should().BeTrue();
}
public void LineSourceSplitsCorrectly()
{
    // Two lines separated by a bare carriage return.
    var source = new LineSource("line1\rline2");

    source.TotalLines.Should().Be(2);
}
//Below is ETL Logic.
//-----------------------------------------------------------------
// Copies per-line rendering configuration from the parent object's
// LineSource component so this child uses the same settings.
// NOTE(review): assumes the parent transform always carries a LineSource
// component — GetComponent returning null would NRE here; confirm.
void GetVarsFromParent()
{
    // Unity: fetch the LineSource script attached to our parent transform.
    parentScript = transform.parent.GetComponent<LineSource>();
    numPointsPerLine = parentScript.numPointsPerLine;
    branchPointBuffer = parentScript.branchPointBuffer;
    // Debug trace of the inherited buffer value.
    print("branchPointBuffer= " + branchPointBuffer);
    //if (_ChildLineRenderer == null)
    //    _ChildLineRenderer = parentScript._ChildLineRenderer;
}
public void BasicElementIsRead()
{
    // Arrange: a minimal single-element document.
    var source = new LineSource(@"<element >");

    // Act
    var elements = ReaperProjectParser.ParseFromLines(source);

    // Assert: exactly one element, typed as a ProjectElement, whose header
    // is the tag name without the angle brackets.
    elements.Length.Should().Be(1);
    var projectElement = elements.First() as ProjectElement;
    projectElement.Should().NotBeNull();
    projectElement.Header.Should().Be("element");
}
/// <summary>
/// Wires the book collection view, the line source, and the persisted
/// bookstore path into the page segmenter window.
/// </summary>
public PageSegmenter()
{
    InitializeComponent();

    // Expose the book list to XAML bindings through a CollectionViewSource.
    bookstoreSource = new BookLineList();
    listingDataView = new CollectionViewSource { Source = bookstoreSource };
    this.DataContext = listingDataView;
    listingDataView.View.CurrentChanged += BookLine_CurrentChanged;

    // Deterministic line ordering: turn off random shuffling.
    lineSource = new LineSource();
    lineSource.PSet("randomize", 0);

    // Init BookstoreSource from persisted settings.
    tbBookPath.Text = DynamicVizSegmenter.Properties.Settings.Default.BookPath;
    BookStorePaths = tbBookPath.Text;
}
// Collection view wrapping the BooksLine list; bound from XAML.
private CollectionViewSource listingDataView;

#endregion Fields

#region Constructors

/// <summary>
/// Wires the book collection view, the line source, and the persisted
/// bookstore path into the page segmenter window.
/// </summary>
public PageSegmenter()
{
    InitializeComponent();
    bookstoreSource = new BookLineList();
    listingDataView = new CollectionViewSource();
    listingDataView.Source = bookstoreSource;
    // Expose the collection view to XAML bindings.
    this.DataContext = listingDataView;
    listingDataView.View.CurrentChanged += new EventHandler(BookLine_CurrentChanged);
    lineSource = new LineSource();
    // Disable random ordering of lines.
    lineSource.PSet("randomize", 0);
    // Init BookstoreSource
    tbBookPath.Text = DynamicVizSegmenter.Properties.Settings.Default.BookPath;
    BookStorePaths = tbBookPath.Text;
}
/// <summary>
/// Rewrites every Reaper project (*.rpp) under <c>args[0]</c> so that media
/// sources whose path contains "Neil" are referenced by bare file name, then
/// writes the updated projects under <c>args[1]</c>, mirroring each
/// project's enclosing folder name.
/// </summary>
/// <param name="args">[0] source root directory, [1] destination root.</param>
static void Main(string[] args)
{
    // Robustness: fail fast with a usage hint instead of throwing
    // IndexOutOfRangeException on a bad invocation.
    if (args == null || args.Length < 2)
    {
        Console.WriteLine("Usage: <source-root> <destination-root>");
        return;
    }

    var rppFiles = Directory.EnumerateFiles(args[0], "*.rpp", SearchOption.AllDirectories);
    foreach (var r in rppFiles)
    {
        Console.WriteLine($"Trying to read {r}");
        var enclosingFolder = Path.GetFileName(Path.GetDirectoryName(r));
        var projName = Path.GetFileName(r);
        var txt = File.ReadAllText(r);
        var source = new LineSource(txt);
        var project = ReaperProjectParser.ParseProjectFromLines(source);

        // Only file-backed sources are candidates for path rewriting.
        var sources = project.ElementsAndDescendants()
            .OfType<ProjectSource>()
            .Where(src => src.Source == ProjectSource.SourceType.File)
            .ToArray();

        foreach (var projectSource in sources)
        {
            Console.WriteLine($" src:{projectSource.File}");
            if (projectSource.File.Contains("Neil"))
            {
                // Strip the directory: keep only the media file's name.
                var fileName = Path.GetFileName(projectSource.File);
                var newSource = projectSource.WithFile(fileName);
                project = (ProjectElement)project.Replace(projectSource, newSource);
            }
        }

        var destFolder = Path.Combine(args[1], enclosingFolder);
        var destFile = Path.Combine(destFolder, projName);
        Console.WriteLine($"creating {destFolder} for {destFile}");
        Directory.CreateDirectory(destFolder);
        File.WriteAllText(destFile, project.AsString(0));
    }
}
public void LineSourceCorrectlyAdvances()
{
    // Two lines delimited by a carriage return.
    var source = new LineSource("a\rb");

    source.IsFinished().Should().BeFalse();

    var firstLine = source.AdvanceLine();
    firstLine.Should().Be("a");
    source.IsFinished().Should().BeFalse();

    var secondLine = source.AdvanceLine();
    secondLine.Should().Be("b");

    // Both lines consumed: the source is exhausted.
    source.IsFinished().Should().BeTrue();
}
/// <summary>
/// Round-trip check: parsing a nested project and rendering it back must
/// reproduce the original text exactly.
/// </summary>
public void SimpleProjectCanBeRoundTripped()
{
    // NOTE(review): this verbatim literal spans multiple lines in the real
    // file; the embedded line breaks are significant to the round-trip.
    var input = @"<A line 1 line 2 <B header line B1 line B2 > line3 line4 > ";
    var source = new LineSource(input);
    var project = ReaperProjectParser.ParseProjectFromLines(source);
    var rendered = ReaperProjectParser.Render(project);
    // Rendering must be byte-identical to the input.
    rendered.Should().Be(input);
}
/// <summary>
/// Associates this instance with the line source that produced it.
/// </summary>
/// <param name="lineSource">The owning line source to remember.</param>
public void SetLineSource(LineSource lineSource) => _LineSource = lineSource;
/// <summary>
/// Trains a lenet-based line recognizer on the "data2" book for several
/// epochs, saving the model and running a smoke-test recognition each epoch.
/// </summary>
public void TestTrainLenetCseg()
{
    string bookPath = "data\\0000\\"; // NOTE(review): unused in this method
    string netFileName = "latin-lenet.model";
    Linerec.GDef("linerec", "use_reject", 1);
    Linerec.GDef("lenet", "junk", 1);
    Linerec.GDef("lenet", "epochs", 4);
    // create Linerec: resume from a saved model when one exists, otherwise
    // build a fresh lenet classifier initialized for the Latin alphabet.
    Linerec linerec;
    if (File.Exists(netFileName))
        linerec = Linerec.LoadLinerec(netFileName);
    else
    {
        linerec = new Linerec("lenet");
        LenetClassifier classifier = linerec.GetClassifier() as LenetClassifier;
        if (classifier != null)
            classifier.InitNumSymbLatinAlphabet();
    }
    // temporary disable junk
    //linerec.DisableJunk = true;
    linerec.StartTraining();
    int nepochs = 10;
    LineSource lines = new LineSource();
    lines.Init(new string[] { "data2" });
    //linerec.GetClassifier().Set("epochs", 1);
    for (int epoch = 1; epoch <= nepochs; epoch++)
    {
        linerec.Epoch(epoch);
        // load cseg samples from every line of the book
        while (!lines.Done())
        {
            lines.MoveNext();
            Intarray cseg = new Intarray();
            //Bytearray image = new Bytearray();
            string transcript = lines.GetTranscript();
            //lines.GetImage(image);
            // NOTE(review): '&&' skips a line only when the cseg fetch fails
            // AND the array is empty; the warning text ("no or bad cseg")
            // suggests '||' may have been intended — confirm before changing.
            if (!lines.GetCharSegmentation(cseg) && cseg.Length() == 0)
            {
                Global.Debugf("warn", "skipping book {0} page {1} line {2} (no or bad cseg)",
                    lines.CurrentBook, lines.CurrentPage, lines.Current);
                continue;
            }
            SegmRoutine.make_line_segmentation_black(cseg);
            linerec.AddTrainingLine(cseg, transcript);
        }
        lines.Reset();
        lines.Shuffle();
        // do Train and clear Dataset
        linerec.FinishTraining();
        // do save — 'epoch % 1 == 0' is always true (save every epoch);
        // presumably a placeholder for a configurable save interval.
        if (epoch % 1 == 0)
            linerec.Save(netFileName);
        // recognize test line with junk detection temporarily re-enabled
        bool bakDisJunk = linerec.DisableJunk;
        linerec.DisableJunk = false;
        DoTestLinerecRecognize(linerec, "data2\\", "test1.png");
        linerec.DisableJunk = bakDisJunk;
    }
    // finally, save the trained model
    linerec.Save(netFileName);
}
/// <summary>
/// Expands include directives (lines starting with <see cref="IncLex"/>) in
/// <paramref name="source"/> into <paramref name="sb"/>, recursing into
/// embedded sources and recording in <paramref name="map"/> which chunk of
/// which source produced each run of output lines.
/// </summary>
/// <param name="source">Script text to preprocess.</param>
/// <param name="sb">Receives the expanded output, one line at a time.</param>
/// <param name="map">Ordered list of source chunks covering the output.</param>
/// <param name="level">Current include nesting depth (0 = top level).</param>
/// <param name="count">Running global output-line counter.</param>
/// <param name="includeArg">Argument of the include that triggered this call.</param>
static void Process(IScriptSource source, StringBuilder sb, List<LineSource> map, int level, ref int count, string includeArg)
{
    var chunk = new LineSource(source);
    var localCount = 0;
    var indent = (string)null;
    var lines = source.AsEnumerableLines();
    foreach (var line in lines)
    {
        /// Append lines that don't start with <see cref="IncLex"/>.
        ///
        if (line.Length < IncLexMinLen || !line.StartsWith(IncLex))
        {
            sb.AppendLine(line);
            Add(chunk, ref count, ref localCount);
            continue;
        }

        /// Get the argument portion of the include statement.
        /// If none can be found, just append the line.
        var argEnd = line.IndexOfAny(new[] { '>', '"' }, IncLexLen + 1);
        if (argEnd < IncLexLen)
        {
            sb.AppendLine(line);
            Add(chunk, ref count, ref localCount);
            continue;
        }
        var arg = line.Substring(IncLexLen + 1, argEnd - IncLexLen - 1);

        /// Get the argument delimiter, either an angle-bracket or
        /// a double-quote. An angle-bracket is an embedded file,
        /// a double-quote is a regular file or possibly a uri in
        /// the future.
        var c = line[IncLexLen];
        switch (c)
        {
            case '<':
                /// Embedded Path.
                // Emit a BEGIN marker, close out the current chunk, then
                // recurse into the embedded source one level deeper.
                indent = indent ?? new String(' ', level);
                sb.AppendLine(String.Format("{0}// <{1}> include BEGIN", indent, arg));
                Add(chunk, ref count, ref localCount);
                map.Add(chunk);
                var efileSrc = new ScriptEmbedded(arg);
                Process(efileSrc, sb, map, level + 1, ref count, arg);
                // New chunk of source.
                chunk = new LineSource(source, localCount);
                break;
            case '"':
                /// TODO: Preprocess Include File Path.
                throw new NotImplementedException();
            default:
                // Delimiter not recognized: treat as an ordinary line.
                sb.AppendLine(line);
                Add(chunk, ref count, ref localCount);
                break;
        }
    }
    if (level > 0)
    {
        // Close an included file with an END marker attributed to the
        // include directive that pulled it in.
        indent = new String(' ', level - 1);
        sb.AppendLine(String.Format("{0}// <{1}> include END", indent, includeArg));
        Add(chunk, ref count, ref localCount);
    }
    map.Add(chunk);
}
/// <summary>
/// Trains a lenet-based line recognizer on the "data2" book for several
/// epochs, saving the model and running a smoke-test recognition each epoch.
/// </summary>
public void TestTrainLenetCseg()
{
    string bookPath = "data\\0000\\"; // NOTE(review): unused in this method
    string netFileName = "latin-lenet.model";
    Linerec.GDef("linerec", "use_reject", 1);
    Linerec.GDef("lenet", "junk", 1);
    Linerec.GDef("lenet", "epochs", 4);
    // create Linerec: resume from a saved model when one exists, otherwise
    // build a fresh lenet classifier initialized for the Latin alphabet.
    Linerec linerec;
    if (File.Exists(netFileName))
    {
        linerec = Linerec.LoadLinerec(netFileName);
    }
    else
    {
        linerec = new Linerec("lenet");
        LenetClassifier classifier = linerec.GetClassifier() as LenetClassifier;
        if (classifier != null)
        {
            classifier.InitNumSymbLatinAlphabet();
        }
    }
    // temporary disable junk
    //linerec.DisableJunk = true;
    linerec.StartTraining();
    int nepochs = 10;
    LineSource lines = new LineSource();
    lines.Init(new string[] { "data2" });
    //linerec.GetClassifier().Set("epochs", 1);
    for (int epoch = 1; epoch <= nepochs; epoch++)
    {
        linerec.Epoch(epoch);
        // load cseg samples from every line of the book
        while (!lines.Done())
        {
            lines.MoveNext();
            Intarray cseg = new Intarray();
            //Bytearray image = new Bytearray();
            string transcript = lines.GetTranscript();
            //lines.GetImage(image);
            // NOTE(review): '&&' skips a line only when the cseg fetch fails
            // AND the array is empty; the warning text ("no or bad cseg")
            // suggests '||' may have been intended — confirm before changing.
            if (!lines.GetCharSegmentation(cseg) && cseg.Length() == 0)
            {
                Global.Debugf("warn", "skipping book {0} page {1} line {2} (no or bad cseg)",
                    lines.CurrentBook, lines.CurrentPage, lines.Current);
                continue;
            }
            SegmRoutine.make_line_segmentation_black(cseg);
            linerec.AddTrainingLine(cseg, transcript);
        }
        lines.Reset();
        lines.Shuffle();
        // do Train and clear Dataset
        linerec.FinishTraining();
        // do save — 'epoch % 1 == 0' is always true (save every epoch);
        // presumably a placeholder for a configurable save interval.
        if (epoch % 1 == 0)
        {
            linerec.Save(netFileName);
        }
        // recognize test line with junk detection temporarily re-enabled
        bool bakDisJunk = linerec.DisableJunk;
        linerec.DisableJunk = false;
        DoTestLinerecRecognize(linerec, "data2\\", "test1.png");
        linerec.DisableJunk = bakDisJunk;
    }
    // finally, save the trained model
    linerec.Save(netFileName);
}
/// <summary>
/// Records that one more output line belongs to <paramref name="chunk"/>,
/// extending the chunk's [First, Last] range and bumping both counters.
/// </summary>
/// <param name="chunk">Chunk being extended.</param>
/// <param name="count">Global output-line counter (advanced by one).</param>
/// <param name="localCount">Per-source line counter (advanced by one).</param>
static void Add(LineSource chunk, ref int count, ref int localCount)
{
    // A negative First marks a chunk that has not absorbed any line yet.
    if (chunk.First < 0)
    {
        chunk.First = count;
    }

    chunk.Last = count;

    count += 1;
    localCount += 1;
}