Example #1
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="colTop">Specifies the top blobs.</param>
 /// <param name="rgbPropagateDown">Specifies whether or not to backpropagate down.</param>
 /// <param name="colBottom">Specifies the bottom blobs.</param>
 public BackwardArgs(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
     : base(colBottom, colTop)
 {
     m_rgbPropagateDown = rgbPropagateDown;
 }
Example #2
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="colBottom">Specifies the bottom blobs.</param>
 /// <param name="colTop">Specifies the top blobs.</param>
 public ForwardArgs(BlobCollection<T> colBottom, BlobCollection<T> colTop)
 {
     m_colTop    = colTop;
     m_colBottom = colBottom;
 }
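
Both ForwardArgs and BackwardArgs above simply bundle the blob collections (plus, for the backward pass, the propagate-down flags) so they can be handed to event handlers. The sketch below is hypothetical: the OnForward/OnBackward events and the RunForward/RunBackward methods are illustrative names only, not part of the library, and the code assumes it sits inside a class with the same type parameter T.

 // Hypothetical sketch: raising forward/backward notifications with the args above.
 public event EventHandler<ForwardArgs<T>> OnForward;
 public event EventHandler<BackwardArgs<T>> OnBackward;

 protected void RunForward(BlobCollection<T> colBottom, BlobCollection<T> colTop)
 {
     // ... compute the forward pass on colBottom, writing results to colTop ...
     if (OnForward != null)
         OnForward(this, new ForwardArgs<T>(colBottom, colTop));
 }

 protected void RunBackward(BlobCollection<T> colTop, List<bool> rgbPropagateDown, BlobCollection<T> colBottom)
 {
     // ... compute the backward pass, writing gradients to colBottom ...
     if (OnBackward != null)
         OnBackward(this, new BackwardArgs<T>(colTop, rgbPropagateDown, colBottom));
 }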
Example #3
        /// <summary>
        /// Perform the beam-search.
        /// </summary>
        /// <param name="input">Specifies the input data (e.g., the encoder input).</param>
        /// <param name="nK">Specifies the beam width for the search.</param>
        /// <param name="dfThreshold">Specifies the threshold; detected items with probabilities below the threshold are ignored (default = 0.01).</param>
        /// <param name="nMax">Specifies the maximum sequence length to process (default = 80).</param>
        /// <returns>The list of top sequences is returned.</returns>
        /// <remarks>
        /// The beam-search algorithm is inspired by the article
        /// @see [How to Implement a Beam Search Decoder for Natural Language Processing](https://machinelearningmastery.com/beam-search-decoder-natural-language-processing/) by Jason Brownlee, "Machine Learning Mastery", 2018
        /// </remarks>
        public List<Tuple<double, bool, List<Tuple<string, int, double>>>> Search(PropertySet input, int nK, double dfThreshold = 0.01, int nMax = 80)
        {
            List<Tuple<double, bool, List<Tuple<string, int, double>>>> rgSequences = new List<Tuple<double, bool, List<Tuple<string, int, double>>>>();

            rgSequences.Add(new Tuple<double, bool, List<Tuple<string, int, double>>>(0, false, new List<Tuple<string, int, double>>()));

            BlobCollection<T> colBottom = m_layer.PreProcessInput(input, null);
            double            dfLoss;
            string            strInput = input.GetProperty("InputData");
            bool bDone = false;

            // Run the initial forward pass and collect the top 'nK' candidates for the first step.
            BlobCollection<T> colTop = m_net.Forward(colBottom, out dfLoss);
            List<Tuple<string, int, double>> rgRes = m_layer.PostProcessOutput(colTop[0], nK);

            rgRes = rgRes.Where(p => p.Item3 >= dfThreshold).ToList();
            List<List<Tuple<string, int, double>>> rgrgRes = new List<List<Tuple<string, int, double>>>();

            rgrgRes.Add(rgRes);

            while (!bDone && nMax > 0)
            {
                int nProcessedCount = 0;

                List<Tuple<double, bool, List<Tuple<string, int, double>>>> rgCandidates = new List<Tuple<double, bool, List<Tuple<string, int, double>>>>();

                // Expand each live sequence with its candidate next items.
                for (int i = 0; i < rgSequences.Count; i++)
                {
                    if (rgrgRes[i].Count > 0)
                    {
                        for (int j = 0; j < rgrgRes[i].Count; j++)
                        {
                            if (rgrgRes[i][j].Item1.Length > 0)
                            {
                                // Accumulate the negative log probability - lower scores are better.
                                double dfScore = rgSequences[i].Item1 - Math.Log(rgrgRes[i][j].Item3);

                                List<Tuple<string, int, double>> rgSequence1 = new List<Tuple<string, int, double>>();
                                rgSequence1.AddRange(rgSequences[i].Item3);
                                rgSequence1.Add(rgrgRes[i][j]);

                                rgCandidates.Add(new Tuple<double, bool, List<Tuple<string, int, double>>>(dfScore, false, rgSequence1));
                                nProcessedCount++;
                            }
                        }
                    }
                    else
                    {
                        // No expansions remain - keep the sequence and mark it as completed.
                        rgCandidates.Add(new Tuple<double, bool, List<Tuple<string, int, double>>>(rgSequences[i].Item1, true, rgSequences[i].Item3));
                    }
                }

                if (nProcessedCount > 0)
                {
                    // Keep the nK best (lowest scoring) candidate sequences.
                    rgSequences = rgCandidates.OrderBy(p => p.Item1).Take(nK).ToList();
                    rgrgRes     = new List<List<Tuple<string, int, double>>>();

                    for (int i = 0; i < rgSequences.Count; i++)
                    {
                        if (!rgSequences[i].Item2)
                        {
                            rgRes = new List<Tuple<string, int, double>>();

                            // Reset state.
                            m_layer.PreProcessInput(strInput, 1, colBottom);
                            m_net.Forward(colBottom, out dfLoss, true);

                            // Re-run through each branch to get correct state at the leaf
                            for (int j = 0; j < rgSequences[i].Item3.Count; j++)
                            {
                                int nIdx = rgSequences[i].Item3[j].Item2;

                                m_layer.PreProcessInput(strInput, nIdx, colBottom);
                                colTop = m_net.Forward(colBottom, out dfLoss, true);

                                // At the leaf, collect the next-step candidates for this sequence.
                                if (j == rgSequences[i].Item3.Count - 1)
                                {
                                    List<Tuple<string, int, double>> rgRes1 = m_layer.PostProcessOutput(colTop[0], nK);
                                    rgRes1 = rgRes1.Where(p => p.Item3 >= dfThreshold).ToList();

                                    for (int k = 0; k < rgRes1.Count; k++)
                                    {
                                        if (rgRes1[k].Item1.Length > 0)
                                        {
                                            rgRes.Add(rgRes1[k]);
                                        }
                                        else
                                        {
                                            Trace.WriteLine("EOS");
                                        }
                                    }

                                    rgrgRes.Add(rgRes);
                                }
                            }
                        }
                        else
                        {
                            // Completed sequences receive no further expansions.
                            rgrgRes.Add(new List<Tuple<string, int, double>>());
                        }
                    }
                }
                else
                {
                    // No sequence produced a new candidate - the search is complete.
                    bDone = true;
                }

                nMax--;
            }

            return rgSequences;
        }
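
As a rough usage sketch (hypothetical: only the "InputData" key is taken from the code above, PropertySet is assumed to offer a parameterless constructor and a SetProperty method, and 'beam' stands in for whatever object exposes Search), the returned tuples unpack as (score, completed flag, sequence of (word, index, probability)):

            // Hypothetical usage of Search(); the "InputData" key matches the
            // property read inside Search() above.
            PropertySet input = new PropertySet();
            input.SetProperty("InputData", "the quick brown fox");

            List<Tuple<double, bool, List<Tuple<string, int, double>>>> rgTop = beam.Search(input, 3, 0.01, 80);

            foreach (Tuple<double, bool, List<Tuple<string, int, double>>> seq in rgTop)
            {
                // Item1 = accumulated score (sum of negative log probabilities, lower is better)
                // Item2 = true when the sequence could not be expanded any further
                // Item3 = the decoded items as (word, index, probability) tuples
                string strWords = string.Join(" ", seq.Item3.Select(p => p.Item1));
                Console.WriteLine(seq.Item1.ToString("N4") + ": " + strWords);
            }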
Example #4
 /// <summary>
 /// Adds a Layer and its Bottom and Top blob collections.
 /// </summary>
 /// <param name="layer">Specifies the Layer.</param>
 /// <param name="colBottomBlobs">Specifies the Bottom Blobs flowing into the Layer.</param>
 /// <param name="colTopBlobs">Specifies the Top Blobs flowing out of the Layer.</param>
 public void Add(Layer<T> layer, BlobCollection<T> colBottomBlobs, BlobCollection<T> colTopBlobs)
 {
     m_rgLayerInfo.Add(new LayerDebugInformation<T>(layer, colBottomBlobs, colTopBlobs, m_blobWork, m_bDetectNans));
 }
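
A hedged sketch of how Add might be used to register every layer of a network for debug inspection; the net.layers, net.bottom_vecs and net.top_vecs accessors are assumed Caffe-style names and are not taken from the example above.

 // Hypothetical sketch: add each layer with its bottom/top blob collections.
 for (int i = 0; i < net.layers.Count; i++)
 {
     debugInfo.Add(net.layers[i], net.bottom_vecs[i], net.top_vecs[i]);
 }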
Example #5
 /// <summary>
 /// The constructor.
 /// </summary>
 /// <param name="res">Specifies the results returned after running the test.</param>
 public TestResultArgs(BlobCollection<T> res)
 {
     m_results    = res;
     m_dfAccuracy = 0;
 }
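
The constructor stores the result blobs and leaves the accuracy at zero so that an event handler can fill it in. A hypothetical handler might look like the following; the event wiring, the Results property and the Accuracy setter are assumptions inferred from the m_results and m_dfAccuracy fields above.

 // Hypothetical sketch: compute a custom accuracy from the test result blobs.
 void Solver_OnTestResults(object sender, TestResultArgs<T> e)
 {
     // ComputeAccuracy is a user-defined helper; e.Results is assumed to expose
     // the blob collection passed to the constructor above.
     e.Accuracy = ComputeAccuracy(e.Results);
 }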