public static void segmentation_correspondences(Narray<Intarray> outsegments, Intarray seg, Intarray cseg) { if (NarrayUtil.Max(seg) >= 10000) throw new Exception("CHECK_ARG: (max(seg)<10000)"); if (NarrayUtil.Max(cseg) >= 10000) throw new Exception("CHECK_ARG: (max(cseg)<10000)"); int nseg = NarrayUtil.Max(seg) + 1; int ncseg = NarrayUtil.Max(cseg) + 1; Intarray overlaps = new Intarray(nseg, ncseg); overlaps.Fill(0); if (seg.Length() != cseg.Length()) throw new Exception("CHECK_ARG: (seg.Length()==cseg.Length())"); for (int i = 0; i < seg.Length(); i++) overlaps[seg.At1d(i), cseg.At1d(i)]++; outsegments.Clear(); outsegments.Resize(ncseg); for (int i = 0; i < nseg; i++) { int j = NarrayRowUtil.RowArgMax(overlaps, i); if (!(j >= 0 && j < ncseg)) throw new Exception("ASSERT: (j>=0 && j<ncseg)"); if (outsegments[j] == null) outsegments[j] = new Intarray(); outsegments[j].Push(i); } }
public static bool Equals(Intarray a, Intarray b) { if (a.Length() != b.Length()) return false; for (int i = 0; i < a.Length(); i++) if (a.UnsafeAt1d(i) != b.UnsafeAt1d(i)) return false; return true; }
/// <summary> /// Compute a classmap that maps a set of possibly sparse classes onto a dense /// list of new classes and vice versa /// </summary> public static void ClassMap(Intarray out_class_to_index, Intarray out_index_to_class, Intarray classes) { int nclasses = NarrayUtil.Max(classes) + 1; Intarray hist = new Intarray(nclasses); hist.Fill(0); for (int i = 0; i < classes.Length(); i++) { if (classes[i] == -1) continue; hist[classes[i]]++; } int count = 0; for (int i = 0; i < hist.Length(); i++) if (hist[i] > 0) count++; out_class_to_index.Resize(nclasses); out_class_to_index.Fill(-1); out_index_to_class.Resize(count); out_index_to_class.Fill(-1); int index = 0; for (int i = 0; i < hist.Length(); i++) { if (hist[i] > 0) { out_class_to_index[i] = index; out_index_to_class[index] = i; index++; } } CHECK_ARG(out_class_to_index.Length() == nclasses, "class_to_index.Length() == nclasses"); CHECK_ARG(out_index_to_class.Length() == NarrayUtil.Max(out_class_to_index) + 1, "index_to_class.Length() == Max(class_to_index)+1"); CHECK_ARG(out_index_to_class.Length() <= out_class_to_index.Length(), "index_to_class.Length() <= class_to_index.Length()"); }
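// Hypothetical usage sketch for ClassMap (the method name and sample labels below are
// made up for illustration): sparse labels {3, 7, 12} become dense indices {0, 1, 2}.
public static void ClassMapExample()
{
    Intarray classes = new Intarray();
    classes.Push(3); classes.Push(7); classes.Push(7); classes.Push(12);
    Intarray c2i = new Intarray(), i2c = new Intarray();
    ClassMap(c2i, i2c, classes);
    // c2i has length 13, all -1 except c2i[3]==0, c2i[7]==1, c2i[12]==2
    // i2c is [3, 7, 12], so i2c[c2i[x]] == x for every label x that occurs
}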
/// <summary> /// Copy one FST to another. /// </summary> /// <param name="dst">The destination. Will be cleared before copying.</param> /// <param name="src">The FST to copy.</param> public static void fst_copy(IGenericFst dst, IGenericFst src) { dst.Clear(); int n = src.nStates(); for (int i = 0; i < n; i++) { dst.NewState(); } dst.SetStart(src.GetStart()); for (int i = 0; i < n; i++) { dst.SetAccept(i, src.GetAcceptCost(i)); Intarray targets = new Intarray(), outputs = new Intarray(), inputs = new Intarray(); Floatarray costs = new Floatarray(); src.Arcs(inputs, targets, outputs, costs, i); int inlen = inputs.Length(); if (inlen != targets.Length()) { throw new Exception("ASSERT: inputs.length() == targets.length()"); } if (inlen != outputs.Length()) { throw new Exception("ASSERT: inputs.length() == outputs.length()"); } if (inlen != costs.Length()) { throw new Exception("ASSERT: inputs.length() == costs.length()"); } for (int j = 0; j < inputs.Length(); j++) { dst.AddTransition(i, targets.At1d(j), outputs.At1d(j), costs.At1d(j), inputs.At1d(j)); } } }
public static void rseg_to_cseg(Intarray cseg, Intarray rseg, Intarray ids) { Intarray map = new Intarray(NarrayUtil.Max(rseg) + 1); map.Fill(0); int color = 0; for (int i = 0; i < ids.Length(); i++) { if (ids[i] == 0) { continue; } color++; int start = ids[i] >> 16; int end = ids[i] & 0xFFFF; if (start > end) { throw new Exception("segmentation encoded in IDs looks seriously broken!"); } if (start >= map.Length() || end >= map.Length()) { throw new Exception("segmentation encoded in IDs doesn't fit!"); } for (int j = start; j <= end; j++) { map[j] = color; } } cseg.MakeLike(rseg); for (int i = 0; i < cseg.Length1d(); i++) { cseg.Put1d(i, map[rseg.At1d(i)]); } }
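// A minimal sketch of the ID packing that rseg_to_cseg decodes above (the helper name
// is hypothetical): each nonzero id stores an inclusive run of rseg colors, start in
// the high 16 bits and end in the low 16 bits, so one cseg character may span several
// raw segments.
public static int PackSegmentRange(int start, int end)
{
    // e.g. PackSegmentRange(2, 4) groups raw segments 2..4 into a single character
    return (start << 16) | (end & 0xFFFF);
}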
public override void TrainDense(IDataset ds) { //PSet("%nsamples", ds.nSamples()); float split = PGetf("cv_split"); int mlp_cv_max = PGeti("cv_max"); if (crossvalidate) { // perform a split for cross-validation, making sure // that we don't have the same sample in both the // test and the training set (even if the data set // is the result of resampling) Intarray test_ids = new Intarray(); Intarray ids = new Intarray(); for (int i = 0; i < ds.nSamples(); i++) { ids.Push(ds.Id(i)); } NarrayUtil.Uniq(ids); Global.Debugf("cvdetail", "reduced {0} ids to {1} ids", ds.nSamples(), ids.Length()); NarrayUtil.Shuffle(ids); int nids = (int)((1.0 - split) * ids.Length()); nids = Math.Min(nids, mlp_cv_max); for (int i = 0; i < nids; i++) { test_ids.Push(ids[i]); } NarrayUtil.Quicksort(test_ids); Intarray training = new Intarray(); Intarray testing = new Intarray(); for (int i = 0; i < ds.nSamples(); i++) { int id = ds.Id(i); if (ClassifierUtil.Bincontains(test_ids, id)) { testing.Push(i); } else { training.Push(i); } } Global.Debugf("cvdetail", "#training {0} #testing {1}", training.Length(), testing.Length()); PSet("%ntraining", training.Length()); PSet("%ntesting", testing.Length()); Datasubset trs = new Datasubset(ds, training); Datasubset tss = new Datasubset(ds, testing); TrainBatch(trs, tss); } else { TrainBatch(ds, ds); } }
/// <summary> /// The main loop iteration. /// </summary> public void Radiate() { Clear(); //logger("beam", beam); //logger("beamcost", beamcost); int control_beam_start = beam.Length(); for (int i = 0; i < control_beam_start; i++) { TryAccept(i); } // in this loop, traversal may add "control nodes" to the beam for (int i = 0; i < beam.Length(); i++) { Traverse(stree.v1[beam[i]], stree.v2[beam[i]], beamcost[i], i); } // try accepts from control beam nodes // (they're not going to the next beam) for (int i = control_beam_start; i < beam.Length(); i++) { TryAccept(i); } Intarray new_beam = new Intarray(); Floatarray new_beamcost = new Floatarray(); for (int i = 0; i < nbest.Length(); i++) { int k = nbest.Tag(i); if (parent_trails[k] < 0) // skip the control beam nodes { continue; } new_beam.Push(stree.Add(beam[parent_trails[k]], all_targets1[k], all_targets2[k], all_inputs[k], all_outputs[k], all_costs[k])); new_beamcost.Push(beamcost[parent_trails[k]] + all_costs[k]); //logger.format("to new beam: trail index %d, stree %d, target %d,%d", //k, new_beam[new_beam.length() - 1], all_targets1[k], all_targets2[k]); } //move(beam, new_beam); beam.Move(new_beam); //move(beamcost, new_beamcost); beamcost.Move(new_beamcost); }
public override void Info() { bool bak = Logger.Default.verbose; Logger.Default.verbose = true; Logger.Default.WriteLine("Linerec"); PPrint(); Logger.Default.WriteLine(String.Format("segmenter: {0}", segmenter.IsEmpty ? "null" : segmenter.Object.Description)); Logger.Default.WriteLine(String.Format("grouper: {0}", grouper.IsEmpty ? "null" : grouper.Object.Description)); Logger.Default.WriteLine(String.Format("counts: {0} {1}", counts.Length(), NarrayUtil.Sum(counts))); //classifier.Object.Info(); Logger.Default.verbose = bak; }
public bool GetCharSegmentation(Intarray image, int book, int page, int line) { image.Clear(); bookstores[book].GetCharSegmentation(image, page, line, "gt"); if (image.Length() > 0) { return(true); } bookstores[book].GetCharSegmentation(image, page, line, ""); if (image.Length() > 0) { return(true); } return(false); }
/// <summary> /// Translate classes using a translation map /// </summary> private void ctranslate(Intarray result, Intarray values, Intarray translation) { result.Resize(values.Length()); for (int i = 0; i < values.Length(); i++) { int v = values[i]; if (v < 0) { result[i] = v; } else { result[i] = translation[v]; } } }
public override void FindBestCuts() { unchecked { for (int i = 0; i < cutcosts.Length(); i++) { NarrayUtil.ExtPut(dimage, i, (int)(cutcosts[i] + 10), 0xff0000); } for (int i = 0; i < cutcosts.Length(); i++) { NarrayUtil.ExtPut(dimage, i, (int)(min_thresh + 10), 0x800000); } } Floatarray temp = new Floatarray(); Gauss.Gauss1d(temp, cutcosts, 3.0f); cutcosts.Move(temp); SegmRoutine.local_minima(ref bestcuts, cutcosts, min_range, min_thresh); for (int i = 0; i < bestcuts.Length(); i++) { Narray <Point> cut = cuts[bestcuts[i]]; for (int j = 0; j < cut.Length(); j++) { Point p = cut[j]; NarrayUtil.ExtPut(dimage, p.X, p.Y, 0x00ff00); } } // if (debug.Length > 0) write_image_packed(debug, dimage); // dshow1d(cutcosts,"Y"); //dshow(dimage,"Y"); }
protected override void Train(IDataset ds) { if (!(ds.nSamples() > 0)) { throw new Exception("nSamples of IDataset must be > 0"); } if (!(ds.nFeatures() > 0)) { throw new Exception("nFeatures of IDataset must be > 0"); } if (c2i.Length() < 1) { Intarray raw_classes = new Intarray(); raw_classes.ReserveTo(ds.nSamples()); for (int i = 0; i < ds.nSamples(); i++) { raw_classes.Push(ds.Cls(i)); } ClassMap(c2i, i2c, raw_classes); /*Intarray classes = new Intarray(); * ctranslate(classes, raw_classes, c2i);*/ //debugf("info","[mapped %d to %d classes]\n",c2i.length(),i2c.length()); } TranslatedDataset mds = new TranslatedDataset(ds, c2i); TrainDense(mds); }
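// A sketch of the translation step, assuming TranslatedDataset simply routes labels
// through the class map built above (an illustration, not the wrapper's actual source):
// int Cls(int i) { int raw = ds.Cls(i); return raw < 0 ? raw : c2i[raw]; }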
public static void line_segmentation_sort_x(Intarray segmentation) { if (NarrayUtil.Max(segmentation) > 100000) { throw new Exception("line_segmentation_sort_x: too many segments"); } Narray <Rect> bboxes = new Narray <Rect>(); ImgLabels.bounding_boxes(ref bboxes, segmentation); Floatarray x0s = new Floatarray(); unchecked { x0s.Push((float)-999999); } for (int i = 1; i < bboxes.Length(); i++) { if (bboxes[i].Empty()) { x0s.Push(999999); } else { x0s.Push(bboxes[i].x0); } } // dprint(x0s,1000); printf("\n"); Narray <int> permutation = new Intarray(); Narray <int> rpermutation = new Intarray(); NarrayUtil.Quicksort(permutation, x0s); rpermutation.Resize(permutation.Length()); for (int i = 0; i < permutation.Length(); i++) { rpermutation[permutation[i]] = i; } // dprint(rpermutation,1000); printf("\n"); for (int i = 0; i < segmentation.Length1d(); i++) { if (segmentation.At1d(i) == 0) { continue; } segmentation.Put1d(i, rpermutation[segmentation.At1d(i)]); } }
/// <summary> /// Copy one FST to another, preserving only lowest-cost arcs. /// This is useful for visualization. /// </summary> /// <param name="dst">The destination. Will be cleared before copying.</param> /// <param name="src">The FST to copy.</param> public static void fst_copy_best_arcs_only(IGenericFst dst, IGenericFst src) { dst.Clear(); int n = src.nStates(); for (int i = 0; i < n; i++) { dst.NewState(); } dst.SetStart(src.GetStart()); for (int i = 0; i < n; i++) { dst.SetAccept(i, src.GetAcceptCost(i)); Intarray targets = new Intarray(), outputs = new Intarray(), inputs = new Intarray(); Floatarray costs = new Floatarray(); src.Arcs(inputs, targets, outputs, costs, i); int inlen = inputs.Length(); if (inlen != targets.Length()) { throw new Exception("ASSERT: inputs.length() == targets.length()"); } if (inlen != outputs.Length()) { throw new Exception("ASSERT: inputs.length() == outputs.length()"); } if (inlen != costs.Length()) { throw new Exception("ASSERT: inputs.length() == costs.length()"); } Dictionary <int, int> hash = new Dictionary <int, int>(); for (int j = 0; j < inlen; j++) { // scan this state's arcs (not the state count) for the cheapest arc per target int t = targets[j]; int best_so_far = -1; if (hash.ContainsKey(t)) { best_so_far = hash[t]; } if (best_so_far == -1 || costs[j] < costs[best_so_far]) { hash[t] = j; } } Intarray keys = new Intarray(); //hash.keys(keys); keys.Clear(); foreach (int key in hash.Keys) { keys.Push(key); } for (int k = 0; k < keys.Length(); k++) { int j = hash[keys[k]]; dst.AddTransition(i, targets[j], outputs[j], costs[j], inputs[j]); } } }
public override void Charseg(ref Intarray outimage, Bytearray inarray) { Bytearray image = new Bytearray(); image.Copy(inarray); OcrRoutine.binarize_simple(image); OcrRoutine.Invert(image); outimage.Copy(image); Intarray labels = new Intarray(); labels.Copy(image); ImgLabels.label_components(ref labels); Narray <Rect> boxes = new Narray <Rect>(); ImgLabels.bounding_boxes(ref boxes, labels); Intarray equiv = new Intarray(boxes.Length()); for (int i = 0; i < boxes.Length(); i++) { equiv[i] = i; } for (int i = 1; i < boxes.Length(); i++) { Rect p = boxes[i]; for (int j = 1; j < boxes.Length(); j++) { if (i == j) { continue; } Rect q = boxes[j]; int x0 = Math.Max(p.x0, q.x0); int x1 = Math.Min(p.x1, q.x1); int iw = x1 - x0; if (iw <= 0) { continue; // no overlap } int ow = Math.Min(p.Width(), q.Width()); float frac = iw / (float)(ow); if (frac < 0.5f) { continue; // insufficient overlap } // printf("%d %d : %d %d : %g\n",i,j,iw,ow,frac); equiv.Put1d(Math.Max(i, j), Math.Min(i, j)); } } for (int i = 0; i < labels.Length(); i++) { labels.Put1d(i, equiv.At1d(labels.At1d(i))); } ImgLabels.renumber_labels(labels, 1); outimage.Move(labels); SegmRoutine.make_line_segmentation_white(outimage); SegmRoutine.check_line_segmentation(outimage); }
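// Merge criterion used in Charseg above, restated: components i and j are joined when
// their horizontal overlap covers at least half the width of the narrower bounding box,
// which attaches vertically stacked pieces (dots, accents, broken strokes) to the same
// character column before relabeling.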
public static void erase_small_components(Floatarray input, float mins = 0.2f, float thresh = 0.25f) { // compute a thresholded image for component labeling float threshold = thresh * NarrayUtil.Max(input); Intarray components = new Intarray(); components.MakeLike(input); components.Fill(0); for (int i = 0; i < components.Length(); i++) { components[i] = (input[i] > threshold ? 1 : 0); } // compute the number of pixels in each component int n = ImgLabels.label_components(ref components); Intarray totals = new Intarray(n + 1); totals.Fill(0); for (int i = 0; i < components.Length(); i++) { totals[components[i]] = totals[components[i]] + 1; } totals[0] = 0; int biggest = NarrayUtil.ArgMax(totals); // erase small components float minsize = mins * totals[biggest]; Bytearray keep = new Bytearray(n + 1); float background = NarrayUtil.Min(input); for (int i = 0; i < keep.Length(); i++) { keep[i] = (byte)(totals[i] > minsize ? 1 : 0); } for (int i = 0; i < input.Length(); i++) { if (keep[components[i]] == 0) { input[i] = background; } } }
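// Hypothetical usage of erase_small_components (the image variable is illustrative):
// with the defaults, components holding fewer than 20% of the largest component's
// pixels are flattened to the background (minimum) value, after thresholding the
// image at 25% of its maximum intensity for labeling.
// Floatarray img = ...; erase_small_components(img, 0.2f, 0.25f);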
public override void SplitIndices(Intarray result1, Intarray result2, Intarray indices) { result1.MakeLike(indices); result2.MakeLike(indices); int k = l2.nStates(); for (int i = 0; i < indices.Length(); i++) { result1.Put1d(i, indices.At1d(i) / k); result2.Put1d(i, indices.At1d(i) % k); } }
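// SplitIndices inverts the pair encoding used by this composition's Combine (see Arcs
// further below): with k = l2.nStates(), the pair (i1, i2) is packed as i1 * k + i2,
// so i1 = index / k and i2 = index % k. A minimal sketch of the packing side:
// int Combine(int i1, int i2) { return i1 * l2.nStates() + i2; }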
public static int count_samples(Intarray classes) { int count = 0; for (int i = 0; i < classes.Length(); i++) { if (classes[i] >= 0) { count++; } } return(count); }
/// <summary> /// Removes epsilons (zeros) and converts the remaining integers to a string. /// </summary> public static void remove_epsilons(out string outs, Intarray a) { StringBuilder sb = new StringBuilder(); for (int i = 0; i < a.Length(); i++) { if (a[i] > 0) { sb.Append((char)a[i]); } } outs = sb.ToString(); }
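// Hypothetical usage (the label values are made up): epsilon (zero) labels from an
// FST path are dropped and the remaining labels are read as characters.
public static void RemoveEpsilonsExample()
{
    Intarray a = new Intarray();
    a.Push((int)'f'); a.Push(0); a.Push((int)'o'); a.Push(0); a.Push((int)'o');
    string s;
    remove_epsilons(out s, a);
    // s == "foo"
}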
/// <summary> /// Reverse the FST's arcs, adding a new start vertex (former accept). /// </summary> public static void fst_copy_reverse(IGenericFst dst, IGenericFst src, bool no_accept = false) { dst.Clear(); int n = src.nStates(); for (int i = 0; i <= n; i++) { dst.NewState(); } if (!no_accept) { dst.SetAccept(src.GetStart()); } dst.SetStart(n); for (int i = 0; i < n; i++) { dst.AddTransition(n, i, 0, src.GetAcceptCost(i), 0); Intarray targets = new Intarray(), outputs = new Intarray(), inputs = new Intarray(); Floatarray costs = new Floatarray(); src.Arcs(inputs, targets, outputs, costs, i); if (inputs.Length() != targets.Length()) { throw new Exception("ASSERT: inputs.length() == targets.length()"); } if (inputs.Length() != outputs.Length()) { throw new Exception("ASSERT: inputs.length() == outputs.length()"); } if (inputs.Length() != costs.Length()) { throw new Exception("ASSERT: inputs.length() == costs.length()"); } for (int j = 0; j < inputs.Length(); j++) { dst.AddTransition(targets.At1d(j), i, outputs.At1d(j), costs.At1d(j), inputs.At1d(j)); } } }
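// Reversal summary for fst_copy_reverse, restated from the code above: src's accept
// costs become arcs out of the new start state n, every arc is flipped from target
// back to source, and (unless no_accept is set) src's start state becomes the accepting
// state, so best paths in dst correspond to reversed best paths in src.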
public void FindBestCuts() { /*Intarray segm = new Intarray(); * segm.Copy(dimage); * ImgLabels.simple_recolor(segm); * ImgIo.write_image_packed("debug1.png", segm);*/ unchecked { for (int i = 0; i < cutcosts.Length(); i++) { NarrayUtil.ExtPut(dimage, i, (int)(cutcosts[i] + 10), 0xff0000); } for (int i = 0; i < cutcosts.Length(); i++) { NarrayUtil.ExtPut(dimage, i, (int)(min_thresh + 10), 0x800000); } } Floatarray temp = new Floatarray(); Gauss.Gauss1d(temp, cutcosts, cost_smooth); cutcosts.Move(temp); SegmRoutine.local_minima(ref bestcuts, cutcosts, min_range, min_thresh); for (int i = 0; i < bestcuts.Length(); i++) { Narray <Point> cut = cuts[bestcuts[i]]; for (int j = 0; j < cut.Length(); j++) { Point p = cut[j]; NarrayUtil.ExtPut(dimage, p.X, p.Y, 0x00ff00); } } /*segm.Copy(dimage); * ImgLabels.simple_recolor(segm); * ImgIo.write_image_packed("debug2.png", segm);*/ }
public int Add(int parent, int vertex1, int vertex2, int input, int output, float cost) { int n = parents.Length(); //logger.format("stree: [%d]: parent %d, v1 %d, v2 %d, cost %f", // n, parent, vertex1, vertex2, cost); parents.Push(parent); v1.Push(vertex1); v2.Push(vertex2); inputs.Push(input); outputs.Push(output); costs.Push(cost); return(n); }
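// A sketch of how a trail is read back out of this search tree (assuming the root was
// added with parent -1, as the parallel arrays above imply): walk parent links from a
// leaf toward the root, visiting the state pair and arc labels stored at each node.
// for (int i = leaf; i != -1; i = parents[i]) { /* visit v1[i], v2[i], inputs[i], outputs[i], costs[i] */ }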
public override void Rescore(int from, int to, int output, float cost, int input) { Intarray t = m_targets[from]; Intarray i = m_inputs[from]; Intarray o = m_outputs[from]; for (int j = 0; j < t.Length(); j++) { if (t[j] == to && i[j] == input && o[j] == output) { m_costs[from][j] = cost; break; } } }
public Dataset8(Narray<sbyte> data, Intarray classes) : this() { // copy the arguments into this dataset's fields (the parameters shadow them) this.data.Copy(data); this.classes.Copy(classes); if (classes.Length() > 0) { nc = NarrayUtil.Max(classes) + 1; nf = data.Dim(1); //CHECK_ARG(NarrayUtil.Min(data) > -100 && NarrayUtil.Max(data) < 100, "min(data)>-100 && max(data)<100"); CHECK_ARG(NarrayUtil.Min(classes) >= -1 && NarrayUtil.Max(classes) < 10000, "min(classes)>=-1 && max(classes)<10000"); } else { nc = 0; nf = -1; } }
protected static void write_node(BinaryWriter writer, IGenericFst fst, int index) { Intarray inputs = new Intarray(); Intarray targets = new Intarray(); Intarray outputs = new Intarray(); Floatarray costs = new Floatarray(); fst.Arcs(inputs, targets, outputs, costs, index); int narcs = targets.Length(); write_float(writer, fst.GetAcceptCost(index)); write_int64_LE(writer, narcs); for (int i = 0; i < narcs; i++) { write_int32_LE(writer, inputs[i]); write_int32_LE(writer, outputs[i]); write_float(writer, costs[i]); write_int32_LE(writer, targets[i]); } }
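// Per-node record layout implied by the writes above (integers little-endian):
//   float  accept cost
//   int64  narcs
//   narcs x { int32 input, int32 output, float cost, int32 target }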
public void reconstruct_edges(Intarray inputs, Intarray outputs, Floatarray costs, Intarray vertices) { int n = vertices.Length(); inputs.Resize(n); outputs.Resize(n); costs.Resize(n); for (int i = 0; i < n - 1; i++) { int source = vertices[i]; int target = vertices[i + 1]; Intarray out_ins = new Intarray(); Intarray out_targets = new Intarray(); Intarray out_outs = new Intarray(); Floatarray out_costs = new Floatarray(); fst.Arcs(out_ins, out_targets, out_outs, out_costs, source); costs[i] = 1e38f; // find the best arc for (int j = 0; j < out_targets.Length(); j++) { if (out_targets[j] != target) { continue; } if (out_costs[j] < costs[i]) { inputs[i] = out_ins[j]; outputs[i] = out_outs[j]; costs[i] = out_costs[j]; } } } inputs[n - 1] = 0; outputs[n - 1] = 0; costs[n - 1] = fst.GetAcceptCost(vertices[n - 1]); }
public bool Step() { int node = heap.Pop(); if (node == n) { return(true); // accept has popped up } // get outbound arcs Intarray inputs = new Intarray(); Intarray targets = new Intarray(); Intarray outputs = new Intarray(); Floatarray costs = new Floatarray(); fst.Arcs(inputs, targets, outputs, costs, node); for (int i = 0; i < targets.Length(); i++) { int t = targets[i]; if (came_from[t] == -1 || g[node] + costs[i] < g[t]) { // relax the edge came_from[t] = node; g[t] = g[node] + costs[i]; heap.Push(t, g[t] + Convert.ToSingle(Heuristic(t))); } } if (accepted_from == -1 || g[node] + fst.GetAcceptCost(node) < g_accept) { // relax the accept edge accepted_from = node; g_accept = g[node] + fst.GetAcceptCost(node); heap.Push(n, g_accept); } return(false); }
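// With a Heuristic that returns 0 for every state, Step above degenerates to plain
// Dijkstra expansion; any admissible lower bound on the remaining cost to accept
// preserves optimality of the first accept that pops. A minimal sketch of the
// degenerate case (an assumption, not this class's actual heuristic):
// protected virtual double Heuristic(int t) { return 0.0; }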
/// <summary> /// Create char segmentation (cseg) files if missing /// </summary> /// <param name="bookPath">path to bookstore</param> /// <param name="model">Linerec model file</param> /// <param name="suffix">e.g., 'gt'</param> /// <param name="saveRseg">save a recolored rseg image when the recognized text does not match the transcript</param> /// <param name="langModel">language model file</param> public void ComputeMissingCsegForBookStore(string bookPath, string model = "default.model", string suffix = "", bool saveRseg = false, string langModel = "default.fst") { // create line recognizer Linerec linerec = Linerec.LoadLinerec(model); // create IBookStore IBookStore bookstore = new SmartBookStore(); bookstore.SetPrefix(bookPath); bookstore.Info(); // language model OcroFST lmodel = OcroFST.MakeOcroFst(); lmodel.Load(langModel); // iterate lines of pages for (int page = 0; page < bookstore.NumberOfPages(); page++) { int nlines = bookstore.LinesOnPage(page); Console.WriteLine("Page {0} has {1} lines", page, nlines); for (int j = 0; j < nlines; j++) { int line = bookstore.GetLineId(page, j); Bytearray image = new Bytearray(); bookstore.GetLine(image, page, line); Intarray cseg = new Intarray(); bookstore.GetCharSegmentation(cseg, page, line, suffix); // check for a missing cseg file if (cseg.Length() <= 0 && image.Length() > 0) { // recognize line OcroFST fst = OcroFST.MakeOcroFst(); Intarray rseg = new Intarray(); linerec.RecognizeLine(rseg, fst, image); // find best results string resText; Intarray inp = new Intarray(); Floatarray costs = new Floatarray(); double totalCost = BeamSearch.beam_search(out resText, inp, costs, fst, lmodel, 100); Console.WriteLine(bookstore.PathFile(page, line, suffix)); Console.Write(" beam_search score: {0}", totalCost); /*string resText2; * fst.BestPath(out resText2);*/ // write cseg to bookstore string trans; bookstore.GetLine(out trans, page, line, suffix); resText = resText.Replace(" ", ""); if (String.IsNullOrEmpty(trans)) { bookstore.PutLine(resText, page, line, suffix); Console.Write("; transcript saved"); } else if (trans == resText) { // convert inputs and rseg to cseg SegmRoutine.rseg_to_cseg(cseg, rseg, inp); bookstore.PutCharSegmentation(cseg, page, line, suffix); Console.Write("; cseg saved"); } else if (saveRseg) { // convert inputs and rseg to cseg SegmRoutine.rseg_to_cseg(cseg, rseg, inp); //SegmRoutine.remove_small_components(cseg, 4); /*bookstore.PutCharSegmentation(cseg, page, line, suffix); * Console.Write("; cseg saved");*/ SegmRoutine.make_line_segmentation_white(cseg); ImgLabels.simple_recolor(cseg); string v = "rseg"; if (!String.IsNullOrEmpty(suffix)) { v += "."; v += suffix; } string rsegpath = bookstore.PathFile(page, line, v, "png"); ImgIo.write_image_packed(rsegpath, cseg); Console.Write("; rseg saved"); } Console.WriteLine(); } } } }
protected void rescale(Floatarray v, Floatarray input) { if (input.Rank() != 2) throw new Exception("CHECK_ARG: sub.Rank()==2"); Floatarray sub = new Floatarray(); // find the largest connected component // and crop to its bounding box // (use a binary version of the character // to compute the bounding box) Intarray components = new Intarray(); float threshold = PGetf("threshold") * NarrayUtil.Max(input); Global.Debugf("biggestcc", "threshold {0}", threshold); components.MakeLike(input); components.Fill(0); for (int i = 0; i < components.Length(); i++) components[i] = (input[i] > threshold ? 1 : 0); int n = ImgLabels.label_components(ref components); Intarray totals = new Intarray(n + 1); totals.Fill(0); for (int i = 0; i < components.Length(); i++) totals[components[i]]++; totals[0] = 0; Narray<Rect> boxes = new Narray<Rect>(); ImgLabels.bounding_boxes(ref boxes, components); int biggest = NarrayUtil.ArgMax(totals); Rect r = boxes[biggest]; int pad = (int)(PGetf("pad") + 0.5f); r.PadBy(pad, pad); Global.Debugf("biggestcc", "({0}) {1}[{2}] :: {3} {4} {5} {6}", n, biggest, totals[biggest], r.x0, r.y0, r.x1, r.y1); // now perform normal feature extraction // (use the original grayscale input) sub = input; ImgMisc.Crop(sub, r); int csize = PGeti("csize"); float s = Math.Max(sub.Dim(0), sub.Dim(1))/(float)csize; if(PGetf("noupscale") > 0 && s < 1.0f) s = 1.0f; float sig = s * PGetf("aa"); float dx = (csize*s-sub.Dim(0))/2f; float dy = (csize*s-sub.Dim(1))/2f; if(sig > 1e-3f) Gauss.Gauss2d(sub, sig, sig); v.Resize(csize, csize); v.Fill(0f); for (int i = 0; i < csize; i++) { for (int j = 0; j < csize; j++) { float x = i * s - dx; float y = j * s - dy; if (x < 0 || x >= sub.Dim(0)) continue; if (y < 0 || y >= sub.Dim(1)) continue; float value = ImgOps.bilin(sub, x, y); v[i, j] = value; } } /*Global.Debugf("biggestcc", "{0} {1} ({2}) -> {3} {4} ({5})", sub.Dim(0), sub.Dim(1), NarrayUtil.Max(sub), v.Dim(0), v.Dim(1), NarrayUtil.Max(v));*/ }
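// Scaling summary for rescale, restated from the code above: s maps the padded
// bounding box of the biggest connected component onto the csize x csize output grid
// (clamped to s >= 1 when the noupscale parameter is set, so small glyphs are never
// enlarged), sig = s * aa sets the anti-aliasing blur, and (dx, dy) center the glyph.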
public void TestTrainLenetCseg() { string bookPath = "data\\0000\\"; string netFileName = "latin-lenet.model"; Linerec.GDef("linerec", "use_reject", 1); Linerec.GDef("lenet", "junk", 1); Linerec.GDef("lenet", "epochs", 4); // create Linerec Linerec linerec; if (File.Exists(netFileName)) linerec = Linerec.LoadLinerec(netFileName); else { linerec = new Linerec("lenet"); LenetClassifier classifier = linerec.GetClassifier() as LenetClassifier; if (classifier != null) classifier.InitNumSymbLatinAlphabet(); } // temporarily disable junk //linerec.DisableJunk = true; linerec.StartTraining(); int nepochs = 10; LineSource lines = new LineSource(); lines.Init(new string[] { "data2" }); //linerec.GetClassifier().Set("epochs", 1); for (int epoch = 1; epoch <= nepochs; epoch++) { linerec.Epoch(epoch); // load cseg samples while (!lines.Done()) { lines.MoveNext(); Intarray cseg = new Intarray(); //Bytearray image = new Bytearray(); string transcript = lines.GetTranscript(); //lines.GetImage(image); if (!lines.GetCharSegmentation(cseg) && cseg.Length() == 0) { Global.Debugf("warn", "skipping book {0} page {1} line {2} (no or bad cseg)", lines.CurrentBook, lines.CurrentPage, lines.Current); continue; } SegmRoutine.make_line_segmentation_black(cseg); linerec.AddTrainingLine(cseg, transcript); } lines.Reset(); lines.Shuffle(); // train on the accumulated dataset, then clear it linerec.FinishTraining(); // save a checkpoint every epoch if (epoch % 1 == 0) linerec.Save(netFileName); // recognize test line bool bakDisJunk = linerec.DisableJunk; linerec.DisableJunk = false; DoTestLinerecRecognize(linerec, "data2\\", "test1.png"); linerec.DisableJunk = bakDisJunk; } // finally save linerec.Save(netFileName); }
public override void Arcs(Intarray ids, Intarray targets, Intarray outputs, Floatarray costs, int node) { int n1 = node / l2.nStates(); int n2 = node % l2.nStates(); Intarray ids1 = new Intarray(); Intarray ids2 = new Intarray(); Intarray t1 = new Intarray(); Intarray t2 = new Intarray(); Intarray o1 = new Intarray(); Intarray o2 = new Intarray(); Floatarray c1 = new Floatarray(); Floatarray c2 = new Floatarray(); l1.Arcs(ids1, t1, o1, c1, n1); l2.Arcs(ids2, t2, o2, c2, n2); // sort & permute Intarray p1 = new Intarray(); Intarray p2 = new Intarray(); NarrayUtil.Quicksort(p1, o1); NarrayUtil.Permute(ids1, p1); NarrayUtil.Permute(t1, p1); NarrayUtil.Permute(o1, p1); NarrayUtil.Permute(c1, p1); NarrayUtil.Quicksort(p2, ids2); NarrayUtil.Permute(ids2, p2); NarrayUtil.Permute(t2, p2); NarrayUtil.Permute(o2, p2); NarrayUtil.Permute(c2, p2); int k1, k2; // l1 epsilon moves for (k1 = 0; k1 < o1.Length() && o1.At1d(k1) == 0; k1++) { ids.Push(ids1.At1d(k1)); targets.Push(Combine(t1.At1d(k1), n2)); outputs.Push(0); costs.Push(c1.At1d(k1)); } // l2 epsilon moves for (k2 = 0; k2 < o2.Length() && ids2.At1d(k2) == 0; k2++) { ids.Push(0); targets.Push(Combine(n1, t2.At1d(k2))); outputs.Push(o2.At1d(k2)); costs.Push(c2.At1d(k2)); } // non-epsilon moves while (k1 < o1.Length() && k2 < ids2.Length()) { while (k1 < o1.Length() && o1.At1d(k1) < ids2.At1d(k2)) k1++; if (k1 >= o1.Length()) break; while (k2 < ids2.Length() && o1.At1d(k1) > ids2.At1d(k2)) k2++; while (k1 < o1.Length() && k2 < ids2.Length() && o1.At1d(k1) == ids2.At1d(k2)) { for (int j = k2; j < ids2.Length() && o1.At1d(k1) == ids2.At1d(j); j++) { ids.Push(ids1.At1d(k1)); targets.Push(Combine(t1.At1d(k1), t2.At1d(j))); outputs.Push(o2.At1d(j)); costs.Push(c1.At1d(k1) + c2.At1d(j)); } k1++; } } }
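// Join summary for Arcs above, restated: l1's arcs are sorted by output label and l2's
// by input label, epsilon (0) moves of either side are emitted first, and the two
// sorted lists are then merge-joined so every pair of arcs with matching labels yields
// one composed arc whose cost is c1 + c2 and whose target is Combine(t1, t2).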
public override int nSamples() { return(_samples.Length()); }