Example #1
        /// <summary>
        /// Process one input sample.
        /// This method is called by outer loop code outside the nupic-engine. We
        /// use this instead of the nupic engine compute() because our inputs and
        /// outputs aren't fixed size vectors of reals.
        /// </summary>
        /// <typeparam name="T">Type of the actual values stored per bucket (the 'actValue' entries).</typeparam>
        /// <param name="recordNum">Record number of this input pattern. Record numbers should
        /// normally increase sequentially by 1 each time unless there
        /// are missing records in the dataset. Knowing this information
        /// ensures that we don't get confused by missing records.</param>
        /// <param name="classification">Map of the classification information:
        /// bucketIdx: index of the encoder bucket
        /// actValue:  actual value going into the encoder</param>
        /// <param name="patternNZ">list of the active indices from the output below</param>
        /// <param name="learn">if true, learn this sample</param>
        /// <param name="infer">if true, perform inference</param>
        /// <returns>dict containing inference results. There is one entry for each
        /// step in steps, where the key is the number of steps, and
        /// the value is an array containing the relative likelihood for
        /// each bucketIdx starting from bucketIdx 0.
        ///
        /// There is also an entry containing the average actual value to
        /// use for each bucket. The key is 'actualValues'.
        ///
        /// for example:
        /// {
        ///     1 :             [0.1, 0.3, 0.2, 0.7],
        ///     4 :             [0.2, 0.4, 0.3, 0.5],
        ///     'actualValues': [1.5, 3.5, 5.5, 7.6],
        /// }
        /// </returns>
        public Classification <T> Compute <T>(int recordNum, IDictionary <string, object> classification, int[] patternNZ, bool learn, bool infer)
        {
            Classification <T> retVal = new Classification <T>();

            //List<T> actualValues = this.actualValues.Select(av => av == null ? default(T) : (T)av).ToList();

            // Save the offset between recordNum and learnIteration if this is the first
            // compute
            if (_recordNumMinusLearnIteration == -1)
            {
                _recordNumMinusLearnIteration = recordNum - _learnIteration;
            }

            // Update the learn iteration
            _learnIteration = recordNum - _recordNumMinusLearnIteration;

            if (Verbosity >= 1)
            {
                Console.WriteLine(String.Format("\n{0}: compute ", g_debugPrefix));
                Console.WriteLine(" recordNum: " + recordNum);
                Console.WriteLine(" learnIteration: " + _learnIteration);
                Console.WriteLine(String.Format(" patternNZ({0}): {1}", patternNZ.Length, Arrays.ToString(patternNZ)));
                Console.WriteLine(" classificationIn: " + classification);
            }

            _patternNzHistory.Append(new Tuple(_learnIteration, patternNZ));

            //------------------------------------------------------------------------
            // Inference:
            // For each active bit in the activationPattern, get the classification
            // votes
            //
            // Return value dict. For buckets which we don't have an actual value
            // for yet, just plug in any valid actual value. It doesn't matter what
            // we use because that bucket won't have non-zero likelihood anyways.
            if (infer)
            {
                // NOTE: If doing 0-step prediction, we shouldn't use any knowledge
                //		 of the classification input during inference.
                object defaultValue = null;
                if (Steps[0] == 0)
                {
                    defaultValue = 0;
                }
                else
                {
                    defaultValue = classification.GetOrDefault("actValue", null);
                }

                T[] actValues = new T[this._actualValues.Count];
                for (int i = 0; i < _actualValues.Count; i++)
                {
                    //if (EqualityComparer<T>.Default.Equals(actualValues[i], default(T)))  //actualValues[i] == default(T))
                    if (_actualValues[i] == null)
                    {
                        actValues[i] = defaultValue != null ? TypeConverter.Convert <T>(defaultValue) : default(T);

                        //(T) (defaultValue ?? default(T));
                    }
                    else
                    {
                        actValues[i] = (T)_actualValues[i];
                    }
                    //actValues[i] = actualValues[i].CompareTo(default(T)) == 0 ? defaultValue : actualValues[i];
                }

                retVal.SetActualValues(actValues);

                // For each n-step prediction...
                foreach (int nSteps in Steps.ToArray())
                {
                    // Accumulate bucket index votes and actValues into these arrays
                    double[] sumVotes = new double[_maxBucketIdx + 1];
                    double[] bitVotes = new double[_maxBucketIdx + 1];

                    foreach (int bit in patternNZ)
                    {
                        Tuple      key     = new Tuple(bit, nSteps);
                        BitHistory history = _activeBitHistory.GetOrDefault(key, null);
                        if (history == null)
                        {
                            continue;
                        }

                        history.Infer(_learnIteration, bitVotes);

                        sumVotes = ArrayUtils.Add(sumVotes, bitVotes);
                    }

                    // Return the votes for each bucket, normalized
                    double total = ArrayUtils.Sum(sumVotes);
                    if (total > 0)
                    {
                        sumVotes = ArrayUtils.Divide(sumVotes, total);
                    }
                    else
                    {
                        // If all buckets have zero probability then simply make all of the
                        // buckets equally likely. There is no actual prediction for this
                        // timestep so any of the possible predictions are just as good.
                        if (sumVotes.Length > 0)
                        {
                            Arrays.Fill(sumVotes, 1.0 / (double)sumVotes.Length);
                        }
                    }

                    retVal.SetStats(nSteps, sumVotes);
                }
            }

            // ------------------------------------------------------------------------
            // Learning:
            // For each active bit in the activationPattern, store the classification
            // info. If the bucketIdx is null, we can't learn. This can happen when the
            // field is missing in a specific record.
            if (learn && classification.GetOrDefault("bucketIdx", null) != null)
            {
                // Get classification info
                int    bucketIdx = (int)(classification["bucketIdx"]);
                object actValue  = classification["actValue"];

                // Update maxBucketIndex
                _maxBucketIdx = Math.Max(_maxBucketIdx, bucketIdx);

                // Update rolling average of actual values if it's a scalar. If it's
                // not, it must be a category, in which case each bucket only ever
                // sees one category so we don't need a running average.
                while (_maxBucketIdx > _actualValues.Count - 1)
                {
                    _actualValues.Add(null);
                }
                if (_actualValues[bucketIdx] == null)
                {
                    _actualValues[bucketIdx] = TypeConverter.Convert <T>(actValue);
                }
                else
                {
                    if (typeof(double).IsAssignableFrom(actValue.GetType()))
                    {
                        Double val = ((1.0 - _actValueAlpha) * (TypeConverter.Convert <double>(_actualValues[bucketIdx])) +
                                      _actValueAlpha * (TypeConverter.Convert <double>(actValue)));
                        _actualValues[bucketIdx] = TypeConverter.Convert <T>(val);
                    }
                    else
                    {
                        _actualValues[bucketIdx] = TypeConverter.Convert <T>(actValue);
                    }
                }

                // Train each pattern that we have in our history that aligns with the
                // steps we have in steps
                int   nSteps         = -1;
                int   iteration      = 0;
                int[] learnPatternNZ = null;
                foreach (int n in Steps.ToArray())
                {
                    nSteps = n;
                    // Do we have the pattern that should be assigned to this classification
                    // in our pattern history? If not, skip it
                    bool found = false;
                    foreach (Tuple t in _patternNzHistory)
                    {
                        iteration = TypeConverter.Convert <int>(t.Get(0));

                        var tuplePos1 = t.Get(1);
                        if (tuplePos1 is JArray)
                        {
                            JArray arr = (JArray)tuplePos1;
                            learnPatternNZ = arr.Values <int>().ToArray();
                        }
                        else
                        {
                            learnPatternNZ = (int[])t.Get(1);
                        }

                        if (iteration == _learnIteration - nSteps)
                        {
                            found = true;
                            break;
                        }
                    }
                    if (!found)
                    {
                        continue;
                    }

                    // Store classification info for each active bit from the pattern
                    // that we got nSteps time steps ago.
                    foreach (int bit in learnPatternNZ)
                    {
                        // Get the history structure for this bit and step
                        Tuple      key     = new Tuple(bit, nSteps);
                        BitHistory history = _activeBitHistory.GetOrDefault(key, null);
                        if (history == null)
                        {
                            _activeBitHistory.Add(key, history = new BitHistory(this, bit, nSteps));
                        }
                        history.Store(_learnIteration, bucketIdx);
                    }
                }
            }

            if (infer && Verbosity >= 1)
            {
                Console.WriteLine(" inference: combined bucket likelihoods:");
                Console.WriteLine("   actual bucket values: " + Arrays.ToString((T[])retVal.GetActualValues()));

                foreach (int key in retVal.StepSet())
                {
                    if (retVal.GetActualValue(key) == null)
                    {
                        continue;
                    }

                    Object[] actual = new Object[] { (T)retVal.GetActualValue(key) };
                    Console.WriteLine(String.Format("  {0} steps: {1}", key, PFormatArray(actual)));
                    int bestBucketIdx = retVal.GetMostProbableBucketIndex(key);
                    Console.WriteLine(String.Format("   most likely bucket idx: {0}, value: {1} ", bestBucketIdx,
                                                    retVal.GetActualValue(bestBucketIdx)));
                }
            }

            return(retVal);
        }
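
A minimal driver sketch for this overload follows (not part of the port itself). The classifier type name, the hard-coded patternNZ, the GetStats getter, and the assumption that a 1-step prediction is configured are all illustrative; only the "bucketIdx"/"actValue" keys and the Classification<T> accessors already used above come from the example itself.

using System;
using System.Collections.Generic;

// Assumed type name for whatever class exposes the Compute<T> overload above.
var classifier = new CLAClassifier();

for (int recordNum = 0; recordNum < 10; recordNum++)
{
    // Stand-in for the active column/cell indices produced by the layer below.
    int[] patternNZ = { 3, 17, 42 };

    // Compute only reads the "bucketIdx" and "actValue" keys.
    var classification = new Dictionary<string, object>
    {
        { "bucketIdx", recordNum % 4 },
        { "actValue", 1.5 * (recordNum % 4) }
    };

    Classification<double> result = classifier.Compute<double>(
        recordNum, classification, patternNZ, learn: true, infer: true);

    // Getter names assumed to mirror the setters used above (SetStats / SetActualValues),
    // and step 1 assumed to be among the configured prediction steps.
    double[] oneStepLikelihoods = result.GetStats(1);
    int bestBucket = result.GetMostProbableBucketIndex(1);
    Console.WriteLine("step 1 best bucket: " + bestBucket
                      + ", likelihood: " + oneStepLikelihoods[bestBucket]);
}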
Example #2
        public Classification <T> Compute <T>(int recordNum, IDictionary <string, object> classification, int[] patternNZ,
                                              bool learn, bool infer)
        {
            if (learn == false && infer == false)
            {
                throw new InvalidOperationException("learn and infer cannot both be false");
            }

            // Save the offset between recordNum and learnIteration if this is the first
            //  compute
            if (_recordNumMinusLearnIteration == null)
            {
                _recordNumMinusLearnIteration = recordNum - _learnIteration;
            }
            // Update the learn iteration
            _learnIteration = recordNum - _recordNumMinusLearnIteration.GetValueOrDefault();

            if (Verbosity >= 1)
            {
                Console.WriteLine(String.Format("\n{0}: compute ", g_debugPrefix));
                Console.WriteLine(" recordNum: " + recordNum);
                Console.WriteLine(" learnIteration: " + _learnIteration);
                Console.WriteLine(String.Format(" patternNZ({0}): {1}", patternNZ.Length, Arrays.ToString(patternNZ)));
                Console.WriteLine(" classificationIn: " + classification);
            }

            // Store pattern in our history
            _patternNZHistory.Append(new Tuple(_learnIteration, patternNZ));

            // To allow multi-class classification, we need to be able to run learning
            // without inference being on. So initialize retVal outside
            // of the inference block.
            Classification <T> retVal = null;

            // Update maxInputIdx and augment weight matrix with zero padding
            if (patternNZ.Max() > _maxInputIdx)
            {
                int newMaxInputIdx = patternNZ.Max();
                foreach (int nSteps in Steps)
                {
                    var subMatrix = ArrayUtils.CreateJaggedArray <double>(newMaxInputIdx - _maxInputIdx, _maxBucketIdx + 1);
                    _weightMatrix[nSteps] = ArrayUtils.Concatinate(_weightMatrix[nSteps], subMatrix, 0);
                }
                _maxInputIdx = newMaxInputIdx;
            }

            // --------------------------------------------------------------------
            // Inference:
            // For each active bit in the activationPattern, get the classification votes
            if (infer)
            {
                retVal = Infer <T>(patternNZ, classification);
            }

            if (learn && classification["bucketIdx"] != null)
            {
                // Get classification info
                int    bucketIdx = (int)classification["bucketIdx"];
                object actValue  = classification["actValue"];

                // Update maxBucketIndex and augment weight matrix with zero padding
                if (bucketIdx > _maxBucketIdx)
                {
                    foreach (int nSteps in Steps)
                    {
                        var subMatrix = ArrayUtils.CreateJaggedArray <double>(_maxInputIdx + 1, bucketIdx - _maxBucketIdx);
                        _weightMatrix[nSteps] = ArrayUtils.Concatinate(_weightMatrix[nSteps], subMatrix, 1);
                    }
                    _maxBucketIdx = bucketIdx;
                }

                // Update rolling average of actual values if it's a scalar. If it's
                // not, it must be a category, in which case each bucket only ever
                // sees one category so we don't need a running average.
                while (_maxBucketIdx > _actualValues.Count - 1)
                {
                    _actualValues.Add(null);
                }
                if (_actualValues[bucketIdx] == null)
                {
                    _actualValues[bucketIdx] = actValue;
                }
                else
                {
                    if (actValue is int || actValue is double || actValue is long)
                    {
                        if (actValue is int)
                        {
                            _actualValues[bucketIdx] = ((1.0 - _actValueAlpha) * TypeConverter.Convert <double>(_actualValues[bucketIdx]) +
                                                        _actValueAlpha * (int)actValue);
                        }
                        if (actValue is double)
                        {
                            _actualValues[bucketIdx] = ((1.0 - _actValueAlpha) * (double)_actualValues[bucketIdx] +
                                                        _actValueAlpha * (double)actValue);
                        }
                        if (actValue is long)
                        {
                            _actualValues[bucketIdx] = ((1.0 - _actValueAlpha) * TypeConverter.Convert <double>(_actualValues[bucketIdx]) +
                                                        _actValueAlpha * (long)actValue);
                        }
                    }
                    else
                    {
                        _actualValues[bucketIdx] = actValue;
                    }
                }
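                // Walk the stored pattern history: every pattern whose age matches one of
                // the configured steps gets the error-driven update, i.e. the scaled error
                // row (error[nSteps] * Alpha) is added to the weight row of each of its
                // active input bits.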
                foreach (var tuple in _patternNZHistory)
                {
                    var iteration      = (int)tuple.Get(0);
                    var learnPatternNZ = (int[])tuple.Get(1);

                    var error = CalculateError(classification);

                    int nSteps = _learnIteration - iteration;
                    if (Steps.Contains(nSteps))
                    {
                        foreach (int bit in learnPatternNZ)
                        {
                            var multipliedRow = ArrayUtils.Multiply(error[nSteps], Alpha);
                            _weightMatrix[nSteps][bit] = ArrayUtils.Add(multipliedRow, _weightMatrix[nSteps][bit]);
                        }
                    }
                }
            }
            // ------------------------------------------------------------------------
            // Verbose print
            if (infer && Verbosity >= 1)
            {
                Console.WriteLine(" inference: combined bucket likelihoods:");
                Console.WriteLine("   actual bucket values: " + Arrays.ToString(retVal.GetActualValues()));

                foreach (int key in retVal.StepSet())
                {
                    if (retVal.GetActualValue(key) == null)
                    {
                        continue;
                    }

                    Object[] actual = new Object[] { retVal.GetActualValue(key) };
                    Console.WriteLine(String.Format("  {0} steps: {1}", key, PFormatArray(actual)));
                    int bestBucketIdx = retVal.GetMostProbableBucketIndex(key);
                    Console.WriteLine(String.Format("   most likely bucket idx: {0}, value: {1} ", bestBucketIdx,
                                                    retVal.GetActualValue(bestBucketIdx)));
                }

                /*
                 * print "  inference: combined bucket likelihoods:"
                 * print "    actual bucket values:", retval["actualValues"]
                 * for (nSteps, votes) in retval.items():
                 *  if nSteps == "actualValues":
                 *    continue
                 *  print "    %d steps: " % (nSteps), _pFormatArray(votes)
                 *  bestBucketIdx = votes.argmax()
                 *  print ("      most likely bucket idx: "
                 *         "%d, value: %s" % (bestBucketIdx,
                 *                            retval["actualValues"][bestBucketIdx]))
                 * print
                 */
            }
            return(retVal);
        }
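
The Infer<T> and CalculateError helpers are not shown in this example. Below is a rough sketch of what they compute in the SDR classifier algorithm this port follows; the class, method names, and signatures here are illustrative, not the port's API. Per step, the weight rows of the active bits are summed per bucket and pushed through a softmax to produce likelihoods, and the learning error is the one-hot target distribution minus those likelihoods.

using System;

// Illustrative sketch only; these are not the Infer/CalculateError methods used above.
static class SdrClassifierSketch
{
    // Sum the weight rows of the active input bits and softmax them into
    // per-bucket likelihoods (weightMatrix is indexed [inputBit][bucketIdx]).
    public static double[] BucketLikelihoods(double[][] weightMatrix, int[] patternNZ, int maxBucketIdx)
    {
        var sums = new double[maxBucketIdx + 1];
        foreach (int bit in patternNZ)
            for (int b = 0; b <= maxBucketIdx; b++)
                sums[b] += weightMatrix[bit][b];

        // Softmax with max-subtraction for numerical stability.
        double max = double.NegativeInfinity;
        foreach (double s in sums) max = Math.Max(max, s);

        var probs = new double[sums.Length];
        double total = 0.0;
        for (int b = 0; b < sums.Length; b++)
        {
            probs[b] = Math.Exp(sums[b] - max);
            total += probs[b];
        }
        for (int b = 0; b < sums.Length; b++) probs[b] /= total;
        return probs;
    }

    // Error used for the weight update: one-hot target minus predicted likelihoods.
    public static double[] Error(double[] predictedLikelihoods, int targetBucketIdx)
    {
        var error = new double[predictedLikelihoods.Length];
        for (int b = 0; b < error.Length; b++)
            error[b] = (b == targetBucketIdx ? 1.0 : 0.0) - predictedLikelihoods[b];
        return error;
    }
}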