/// <summary>
/// Builds a mock three-row CSV header for tests: row 0 is the field names,
/// row 1 the field types, row 2 the flags row (only "T" here).
/// </summary>
private IValueList GetTestHeaderOff()
{
    Tuple[] headerRows =
    {
        new Tuple("timestamp", "consumption"),
        new Tuple("datetime", "float"),
        new Tuple("T"),
    };

    return new MockValueList
    {
        GetRowFunc = rowIndex => headerRows[rowIndex],
        SizeFunc = () => headerRows.Length
    };
}
// Verifies that the DateEncoder reports the expected sub-encoder descriptions
// (field name + bit offset pairs) and that the fixture bit array matches the
// expected encoding. "de", "expected" and "bits" are test-class fields
// populated by SetUp()/InitDe() — TODO confirm their initialization elsewhere
// in this file.
public void TestDateEncoder()
{
    SetUp();
    InitDe();

    List<Tuple> descs = de.GetDescription();
    Assert.IsNotNull(descs);
    // should be [("season", 0), ("day of week", 12), ("weekend", 19), ("time of day", 25)]
    List<Tuple> expectedDescs = new List<Tuple>
    {
        new Tuple("season", 0),
        new Tuple("day of week", 12),
        new Tuple("weekend", 19),
        new Tuple("time of day", 25)
    };

    Assert.AreEqual(expectedDescs.Count, descs.Count);
    for (int i = 0; i < expectedDescs.Count; ++i)
    {
        Tuple desc = descs[i];
        Assert.IsNotNull(desc);
        Assert.AreEqual(expectedDescs[i], desc);
    }

    // Compare the fixture's expected encoding against the actual bits.
    Assert.IsTrue(expected.SequenceEqual(bits));

    Console.WriteLine();
    de.PPrintHeader("");
    de.PPrint(bits, "");
    Console.WriteLine();
}
/**
 * Calculate the active cells, using the current active columns and dendrite
 * segments. Grow and reinforce synapses.
 *
 * <pre>
 * Pseudocode:
 * for each column
 *   if column is active and has active distal dendrite segments
 *     call activatePredictedColumn
 *   if column is active and doesn't have active distal dendrite segments
 *     call burstColumn
 *   if column is inactive and has matching distal dendrite segments
 *     call punishPredictedColumn
 *
 * </pre>
 *
 * @param conn                 the Connections memory state queried and updated here
 * @param cycle                accumulator receiving this cycle's active and winner cells
 * @param activeColumnIndices  indices of the currently active columns
 * @param learn                if true, permanences are adjusted (and wrong predictions punished)
 */
public void ActivateCells(Connections conn, ComputeCycle cycle, int[] activeColumnIndices, bool learn)
{
    ColumnData columnData = new ColumnData();

    HashSet<Cell> prevActiveCells = conn.GetActiveCells();
    HashSet<Cell> prevWinnerCells = conn.GetWinnerCells();

    // Active columns sorted ascending by index (GroupBy2 presumably expects
    // sorted input — confirm against its contract).
    List<Column> activeColumns = activeColumnIndices
        .OrderBy(i => i)
        .Select(i => conn.GetColumn(i))
        .ToList();

    // Key extractors: an active column maps to itself; a segment maps to the
    // column of its parent cell.
    Func<Column, Column> identity = c => c;
    Func<DistalDendrite, Column> segToCol = segment => segment.GetParentCell().GetColumn();

    //@SuppressWarnings({ "rawtypes" })
    // Group the three streams — active columns, active segments, matching
    // segments — by their column.
    GroupBy2<Column> grouper =
        GroupBy2<Column>.Of(
            new Tuple<List<object>, Func<object, Column>>(activeColumns.Cast<object>().ToList(), x => identity((Column)x)),
            new Tuple<List<object>, Func<object, Column>>(new List<DistalDendrite>(conn.GetActiveSegments()).Cast<object>().ToList(), x => segToCol((DistalDendrite)x)),
            new Tuple<List<object>, Func<object, Column>>(new List<DistalDendrite>(conn.GetMatchingSegments()).Cast<object>().ToList(), x => segToCol((DistalDendrite)x)));

    double permanenceIncrement = conn.GetPermanenceIncrement();
    double permanenceDecrement = conn.GetPermanenceDecrement();

    foreach (Tuple t in grouper)
    {
        columnData = columnData.Set(t);

        if (columnData.IsNotNone(ACTIVE_COLUMNS))
        {
            if (columnData.ActiveSegments().Any())
            {
                // Active column with active distal segments: activate the
                // predicted cells; they are both active and winners.
                List<Cell> cellsToAdd = ActivatePredictedColumn(conn, columnData.ActiveSegments(),
                    columnData.MatchingSegments(), prevActiveCells, prevWinnerCells,
                        permanenceIncrement, permanenceDecrement, learn);

                cycle.ActiveCells().UnionWith(cellsToAdd);
                cycle.WinnerCells().UnionWith(cellsToAdd);
            }
            else
            {
                // Active column without active segments: burst. The returned
                // Tuple holds (activated cells, single winner cell).
                Tuple cellsXwinnerCell = BurstColumn(conn, columnData.Column(), columnData.MatchingSegments(),
                    prevActiveCells, prevWinnerCells, permanenceIncrement, permanenceDecrement,
                        conn.GetRandom(), learn);

                cycle.ActiveCells().UnionWith((IEnumerable<Cell>)cellsXwinnerCell.Get(0));
                cycle.WinnerCells().Add((Cell)cellsXwinnerCell.Get(1));
            }
        }
        else
        {
            // Inactive column that nevertheless had matching segments:
            // punish those wrong predictions (only when learning).
            if (learn)
            {
                PunishPredictedColumn(conn, columnData.ActiveSegments(), columnData.MatchingSegments(),
                    prevActiveCells, prevWinnerCells, conn.GetPredictedSegmentDecrement());
            }
        }
    }
}
/**
 * Package private to encourage construction using the Builder Pattern
 * but still allow inheritance.
 */
internal CoordinateEncoder()
{
    // The description list holds (name, index) Tuples for the two sub-fields
    // this encoder produces: "coordinate" at 0 and "radius" at 1.
    description.Add(new Tuple("coordinate", 0));
    description.Add(new Tuple("radius", 1));
}
/// <summary>
/// Registers a child encoder under this encoder and appends the child's
/// field descriptions, offset by this encoder's current total width.
/// </summary>
/// <param name="fieldName">name of the field the child encodes</param>
/// <param name="encoderName">name under which the child is registered</param>
/// <param name="child">the encoder being added</param>
public void AddEncoder(string fieldName, string encoderName, IEncoder child)
{
    base.AddEncoder(this, fieldName, encoderName, child, width);

    foreach (Tuple childDesc in child.GetDescription())
    {
        // Shift the child's bit offset into this encoder's output space.
        object descName = childDesc.Get(0);
        int shiftedOffset = (int)childDesc.Get(1) + GetWidth();
        description.Add(new Tuple(descName, shiftedOffset));
    }

    // Grow the total width only after the descriptions were shifted.
    width += child.GetWidth();
}
// Verifies that a customDays encoder over ("sat","sun","fri") encodes
// identically to the built-in Weekend field across ~300 consecutive days,
// and that a single-day customDays encoder ("Monday") round-trips through
// Decode: the decoded flag must agree with the actual day of week.
public void TestWeekend()
{
    //use of forced is not recommended, used here for readability, see ScalarEncoder
    DateEncoder e = (DateEncoder)((DateEncoder.Builder)DateEncoder.GetBuilder()).CustomDays(21, new List<string>
    {
        "sat", "sun", "fri"
    }).Forced(true).Build();
    DateEncoder mon = (DateEncoder)((DateEncoder.Builder)DateEncoder.GetBuilder()).CustomDays(21, new List<string>
    {
        "Monday"
    }).Forced(true).Build();
    DateEncoder e2 = (DateEncoder)((DateEncoder.Builder)DateEncoder.GetBuilder()).Weekend(21, 1).Forced(true).Build();

    //DateTime d = new DateTime(1988,5,29,20,0);
    DateTime d = new DateTime(1988, 5, 29, 20, 0, 0); // a Sunday evening

    Console.WriteLine("DateEncoderTest.testWeekend(): e.encode(d) = " + Arrays.ToString(e.Encode(d)));
    Console.WriteLine("DateEncoderTest.testWeekend(): e2.encode(d) = " + Arrays.ToString(e2.Encode(d)));
    Assert.IsTrue(e.Encode(d).SequenceEqual(e2.Encode(d)));

    for (int i = 0; i < 300; i++)
    {
        DateTime curDate = d.AddDays(i + 1);
        // Both encoders must agree for every day.
        Assert.IsTrue(e.Encode(curDate).SequenceEqual(e2.Encode(curDate)));

        // Make sure the "Monday" encoder decodes consistently with the date.
        Tuple decoded = mon.Decode(mon.Encode(curDate), null);

        // Decode result layout: item 0 = field name -> RangeList map,
        // item 1 = field ordering.
        Map<String, RangeList> fieldsMap = (Map<String, RangeList>)decoded.Get(0);
        List<String> fieldsOrder = (List<String>)decoded.Get(1);

        Assert.IsNotNull(fieldsMap);
        Assert.IsNotNull(fieldsOrder);
        Assert.AreEqual(1, fieldsMap.Count);

        RangeList range = fieldsMap["Monday"];
        Assert.AreEqual(1, range.Count);
        Assert.AreEqual(1, ((List<MinMax>)range.Get(0)).Count);

        MinMax minmax = range.GetRange(0);
        Console.WriteLine("DateEncoderTest.testWeekend(): minmax.min() = {0} -> {1}", minmax.Min(), curDate.DayOfWeek);

        // A decoded min of 1.0 indicates "is Monday"; DayOfWeek.Monday == 1.
        if (minmax.Min() == 1.0)
        {
            Assert.AreEqual(1, (int)curDate.DayOfWeek);
        }
        else
        {
            Assert.AreNotEqual(1, (int)curDate.DayOfWeek);
        }
    }
}
/// <summary>
/// Builds a mock three-row CSV header for tests: row 0 is the field names,
/// row 1 the field types, row 2 the flags row ("T", "B", "L").
/// </summary>
private IValueList GetTestHeaderLearn()
{
    Tuple[] headerRows =
    {
        new Tuple("timestamp", "consumption"),
        new Tuple("datetime", "float"),
        new Tuple("T", "B", "L"),
    };

    return new MockValueList
    {
        GetRowFunc = rowIndex => headerRows[rowIndex],
        SizeFunc = () => headerRows.Length
    };
}
/**
 * {@inheritDoc}
 *
 * Encodes a (coordinate, radius) input Tuple: computes the neighborhood
 * around the coordinate, selects the top-w winning coordinates, and sets
 * one bit per winner in the output array.
 *
 * @param inputData  Tuple of (int[] coordinate, double radius)
 * @param output     encoding target; winner bits are set to 1 in place
 */
public override void EncodeIntoArray(Tuple inputData, int[] output)
{
    List<int[]> neighs = Neighbors((int[])inputData.Get(0), (double)inputData.Get(1));

    // Idiom: List<T>.ToArray() replaces the original manual element-copy loop.
    int[][] neighbors = neighs.ToArray();

    int[][] winners = TopWCoordinates(this, neighbors, w);
    foreach (int[] winner in winners)
    {
        // Each winning coordinate maps to exactly one bit of the n-wide output.
        output[BitForCoordinate(winner, n)] = 1;
    }
}
// Verifies DateEncoder.Decode: decoding the fixture bit array ("bits", set up
// by SetUp()/InitDe()) must yield all four sub-fields, each as a single point
// range matching the expected scalar value within the encoder's resolution.
public void TestDecoding()
{
    SetUp();
    InitDe();

    //TODO Why null is needed?
    Tuple decoded = de.Decode(bits, null);

    Console.WriteLine(decoded.ToString());
    Console.WriteLine(String.Format("decodedToStr=>{0}", de.DecodedToStr(decoded)));

    // Decode result layout: item 0 = field name -> RangeList map,
    // item 1 = field ordering.
    Map<String, RangeList> fieldsMap = (Map<String, RangeList>)decoded.Get(0);
    List<String> fieldsOrder = (List<String>)decoded.Get(1);

    Assert.IsNotNull(fieldsMap);
    Assert.IsNotNull(fieldsOrder);
    Assert.AreEqual(4, fieldsMap.Count);

    Map<String, Double> expectedMap = new Map<String, Double>();
    expectedMap.Add("season", 305.0);
    expectedMap.Add("time of day", 14.4);
    expectedMap.Add("day of week", 3.0);
    expectedMap.Add("weekend", 0.0);

    foreach (String key in expectedMap.Keys)
    {
        double expected = expectedMap[key];
        RangeList actual = fieldsMap[key];
        // Each field should decode to a single range whose min and max both
        // match the expected value (to within the encoder's resolution).
        Assert.AreEqual(1, actual.Count);
        MinMax minmax = actual.GetRange(0);
        Assert.AreEqual(expected, minmax.Min(), de.GetResolution());
        Assert.AreEqual(expected, minmax.Max(), de.GetResolution());
    }

    Console.WriteLine(decoded.ToString());
    Console.WriteLine(String.Format("decodedToStr=>{0}", de.DecodedToStr(decoded)));
}
/// <summary>
/// Process one input sample.
/// This method is called by outer loop code outside the nupic-engine. We
/// use this instead of the nupic engine compute() because our inputs and
/// outputs aren't fixed size vectors of reals.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="recordNum">Record number of this input pattern. Record numbers should
/// normally increase sequentially by 1 each time unless there
/// are missing records in the dataset. Knowing this information
/// insures that we don't get confused by missing records.</param>
/// <param name="classification">Map of the classification information:
/// bucketIdx: index of the encoder bucket
/// actValue: actual value going into the encoder</param>
/// <param name="patternNZ">list of the active indices from the output below</param>
/// <param name="learn">if true, learn this sample</param>
/// <param name="infer">if true, perform inference</param>
/// <returns>dict containing inference results, there is one entry for each
/// step in steps, where the key is the number of steps, and
/// the value is an array containing the relative likelihood for
/// each bucketIdx starting from bucketIdx 0.
///
/// There is also an entry containing the average actual value to
/// use for each bucket. The key is 'actualValues'.
///
/// for example:
/// {
/// 1 : [0.1, 0.3, 0.2, 0.7],
/// 4 : [0.2, 0.4, 0.3, 0.5],
/// 'actualValues': [1.5, 3,5, 5,5, 7.6],
/// }
/// </returns>
public Classification<T> Compute<T>(int recordNum, IDictionary<string, object> classification, int[] patternNZ, bool learn, bool infer)
{
    Classification<T> retVal = new Classification<T>();
    //List<T> actualValues = this.actualValues.Select(av => av == null ? default(T) : (T)av).ToList();

    // Save the offset between recordNum and learnIteration if this is the first
    // compute
    if (_recordNumMinusLearnIteration == -1)
    {
        _recordNumMinusLearnIteration = recordNum - _learnIteration;
    }

    // Update the learn iteration
    _learnIteration = recordNum - _recordNumMinusLearnIteration;

    if (Verbosity >= 1)
    {
        Console.WriteLine(String.Format("\n{0}: compute ", g_debugPrefix));
        Console.WriteLine(" recordNum: " + recordNum);
        Console.WriteLine(" learnIteration: " + _learnIteration);
        Console.WriteLine(String.Format(" patternNZ({0}): {1}", patternNZ.Length, Arrays.ToString(patternNZ)));
        Console.WriteLine(" classificationIn: " + classification);
    }

    // Remember this pattern keyed by the iteration it arrived on; the learning
    // phase below looks patterns up by (learnIteration - nSteps).
    _patternNzHistory.Append(new Tuple(_learnIteration, patternNZ));

    //------------------------------------------------------------------------
    // Inference:
    // For each active bit in the activationPattern, get the classification
    // votes
    //
    // Return value dict. For buckets which we don't have an actual value
    // for yet, just plug in any valid actual value. It doesn't matter what
    // we use because that bucket won't have non-zero likelihood anyways.
    if (infer)
    {
        // NOTE: If doing 0-step prediction, we shouldn't use any knowledge
        // of the classification input during inference.
        object defaultValue = null;
        if (Steps[0] == 0)
        {
            defaultValue = 0;
        }
        else
        {
            defaultValue = classification.GetOrDefault("actValue", null);
        }

        T[] actValues = new T[this._actualValues.Count];
        for (int i = 0; i < _actualValues.Count; i++)
        {
            //if (EqualityComparer<T>.Default.Equals(actualValues[i], default(T))) //actualValues[i] == default(T))
            if (_actualValues[i] == null)
            {
                // No sample seen for this bucket yet: substitute the default.
                actValues[i] = defaultValue != null ? TypeConverter.Convert<T>(defaultValue) : default(T); //(T) (defaultValue ?? default(T));
            }
            else
            {
                actValues[i] = (T)_actualValues[i];
            }
            //actValues[i] = actualValues[i].CompareTo(default(T)) == 0 ? defaultValue : actualValues[i];
        }

        retVal.SetActualValues(actValues);

        // For each n-step prediction...
        foreach (int nSteps in Steps.ToArray())
        {
            // Accumulate bucket index votes and actValues into these arrays
            double[] sumVotes = new double[_maxBucketIdx + 1];
            double[] bitVotes = new double[_maxBucketIdx + 1];

            foreach (int bit in patternNZ)
            {
                Tuple key = new Tuple(bit, nSteps);
                BitHistory history = _activeBitHistory.GetOrDefault(key, null);
                if (history == null)
                {
                    // This (bit, nSteps) pair has never been trained.
                    continue;
                }
                history.Infer(_learnIteration, bitVotes);
                sumVotes = ArrayUtils.Add(sumVotes, bitVotes);
            }

            // Return the votes for each bucket, normalized
            double total = ArrayUtils.Sum(sumVotes);
            if (total > 0)
            {
                sumVotes = ArrayUtils.Divide(sumVotes, total);
            }
            else
            {
                // If all buckets have zero probability then simply make all of the
                // buckets equally likely. There is no actual prediction for this
                // timestep so any of the possible predictions are just as good.
                if (sumVotes.Length > 0)
                {
                    Arrays.Fill(sumVotes, 1.0 / (double)sumVotes.Length);
                }
            }

            retVal.SetStats(nSteps, sumVotes);
        }
    }

    // ------------------------------------------------------------------------
    // Learning:
    // For each active bit in the activationPattern, store the classification
    // info. If the bucketIdx is None, we can't learn. This can happen when the
    // field is missing in a specific record.
    if (learn && classification.GetOrDefault("bucketIdx", null) != null)
    {
        // Get classification info
        int bucketIdx = (int)(classification["bucketIdx"]);
        object actValue = classification["actValue"];

        // Update maxBucketIndex
        _maxBucketIdx = Math.Max(_maxBucketIdx, bucketIdx);

        // Update rolling average of actual values if it's a scalar. If it's
        // not, it must be a category, in which case each bucket only ever
        // sees one category so we don't need a running average.
        while (_maxBucketIdx > _actualValues.Count - 1)
        {
            _actualValues.Add(null);
        }
        if (_actualValues[bucketIdx] == null)
        {
            _actualValues[bucketIdx] = TypeConverter.Convert<T>(actValue);
        }
        else
        {
            if (typeof(double).IsAssignableFrom(actValue.GetType()))
            {
                // Exponential moving average, weight _actValueAlpha on the new sample.
                Double val = ((1.0 - _actValueAlpha) * (TypeConverter.Convert<double>(_actualValues[bucketIdx])) +
                              _actValueAlpha * (TypeConverter.Convert<double>(actValue)));
                _actualValues[bucketIdx] = TypeConverter.Convert<T>(val);
            }
            else
            {
                _actualValues[bucketIdx] = TypeConverter.Convert<T>(actValue);
            }
        }

        // Train each pattern that we have in our history that aligns with the
        // steps we have in steps
        int nSteps = -1;
        int iteration = 0;
        int[] learnPatternNZ = null;
        foreach (int n in Steps.ToArray())
        {
            nSteps = n;
            // Do we have the pattern that should be assigned to this classification
            // in our pattern history? If not, skip it
            bool found = false;
            foreach (Tuple t in _patternNzHistory)
            {
                iteration = TypeConverter.Convert<int>(t.Get(0));

                var tuplePos1 = t.Get(1);
                if (tuplePos1 is JArray)
                {
                    // Pattern came back from JSON deserialization; unwrap to int[].
                    JArray arr = (JArray)tuplePos1;
                    learnPatternNZ = arr.Values<int>().ToArray();
                }
                else
                {
                    learnPatternNZ = (int[])t.Get(1);
                }

                if (iteration == _learnIteration - nSteps)
                {
                    found = true;
                    break;
                }
                iteration++; // NOTE(review): no-op — "iteration" is reassigned at the top of each pass; looks like a leftover.
            }
            if (!found)
            {
                continue;
            }

            // Store classification info for each active bit from the pattern
            // that we got nSteps time steps ago.
            foreach (int bit in learnPatternNZ)
            {
                // Get the history structure for this bit and step
                Tuple key = new Tuple(bit, nSteps);
                BitHistory history = _activeBitHistory.GetOrDefault(key, null);
                if (history == null)
                {
                    _activeBitHistory.Add(key, history = new BitHistory(this, bit, nSteps));
                }
                history.Store(_learnIteration, bucketIdx);
            }
        }
    }

    if (infer && Verbosity >= 1)
    {
        Console.WriteLine(" inference: combined bucket likelihoods:");
        Console.WriteLine(" actual bucket values: " + Arrays.ToString((T[])retVal.GetActualValues()));

        foreach (int key in retVal.StepSet())
        {
            if (retVal.GetActualValue(key) == null)
            {
                continue;
            }
            Object[] actual = new Object[] { (T)retVal.GetActualValue(key) };
            Console.WriteLine(String.Format(" {0} steps: {1}", key, PFormatArray(actual)));
            int bestBucketIdx = retVal.GetMostProbableBucketIndex(key);
            Console.WriteLine(String.Format(" most likely bucket idx: {0}, value: {1} ", bestBucketIdx, retVal.GetActualValue(bestBucketIdx)));
        }
    }

    return(retVal);
}
/**
 * Initializes the {@link DateEncoder.Builder} specified
 * @param b the builder on which to set the mapping.
 * @param m the map containing the values
 * @param key the key to be set.
 */
private static void SetDateFieldBits(DateEncoder.Builder b, Map<string, object> m, string key)
{
    Tuple t = (Tuple)m[key];

    // Every field's tuple starts with its bit count.
    int bits = (int)t.Get(0);

    // For the numeric fields the optional second tuple element is a radius;
    // it is applied only when positive. (customDays carries a day list
    // instead — handled separately below.)
    double radius = 0.0;
    if (!"customDays".Equals(key) && t.Count > 1)
    {
        radius = TypeConverter.Convert<double>(t.Get(1));
    }

    switch (key)
    {
        case "season":
        {
            if (radius > 0.0) { b.Season(bits, radius); } else { b.Season(bits); }
            break;
        }
        case "dayOfWeek":
        {
            if (radius > 0.0) { b.DayOfWeek(bits, radius); } else { b.DayOfWeek(bits); }
            break;
        }
        case "weekend":
        {
            if (radius > 0.0) { b.Weekend(bits, radius); } else { b.Weekend(bits); }
            break;
        }
        case "holiday":
        {
            if (radius > 0.0) { b.Holiday(bits, radius); } else { b.Holiday(bits); }
            break;
        }
        case "timeOfDay":
        {
            if (radius > 0.0) { b.TimeOfDay(bits, radius); } else { b.TimeOfDay(bits); }
            break;
        }
        case "customDays":
        {
            // BUG FIX: the original converted t.Get(1) — the List<string> of
            // day names it then cast and passed to CustomDays — to double,
            // which cannot succeed. Gate on a non-empty day list instead.
            List<string> days = t.Count > 1 ? t.Get(1) as List<string> : null;
            if (days != null && days.Count > 0) { b.CustomDays(bits, days); } else { b.CustomDays(bits); }
            break;
        }
        default:
            break;
    }
}
/// <summary>
/// Stores the supplied grouping Tuple and returns this instance for chaining.
/// </summary>
public ColumnData Set(Tuple t)
{
    this.t = t;
    return this;
}
/// <summary>
/// Creates a ColumnData wrapping the supplied grouping Tuple.
/// </summary>
public ColumnData(Tuple t)
{
    this.t = t;
}
/**
 * Returns a {@link DecodeResult} which is a tuple of range names
 * and lists of {@link RangeLists} in the first entry, and a list
 * of descriptions for each range in the second entry.
 *
 * @param encoded the encoded bit vector
 * @param parentFieldName the field the vector corresponds with
 * @return the DecodeResult, or null when there is nothing to decode
 */
public override Tuple Decode(int[] encoded, string parentFieldName) // returns DecodeResult
{
    // For now, we simply assume any top-down output greater than 0
    // is ON. Eventually, we will probably want to incorporate the strength
    // of each top-down output.
    if (encoded == null || encoded.Length < 1)
    {
        return null;
    }
    int[] tmpOutput = Arrays.CopyOf(encoded, encoded.Length);

    // ------------------------------------------------------------------------
    // First, assume the input pool is not sampled 100%, and fill in the
    // "holes" in the encoded representation (which are likely to be present
    // if this is a coincidence that was learned by the SP).

    // Search for portions of the output that have "holes": a 1, a short run
    // of 0s, then a 1 — and fill the gap with 1s.
    int maxZerosInARow = GetHalfWidth();
    for (int wi = 0; wi < maxZerosInARow; wi++)
    {
        int[] searchStr = new int[wi + 3];
        Arrays.Fill(searchStr, 1);
        ArrayUtils.SetRangeTo(searchStr, 1, -1, 0); // pattern: 1, 0...0, 1
        int subLen = searchStr.Length;

        // Does this search string appear in the output?
        if (IsPeriodic())
        {
            for (int j = 0; j < GetN(); j++)
            {
                // Wrap indices so the window can span the array edge.
                int[] outputIndices = ArrayUtils.Range(j, j + subLen);
                outputIndices = ArrayUtils.Modulo(outputIndices, GetN());
                if (Arrays.AreEqual(searchStr, ArrayUtils.Sub(tmpOutput, outputIndices)))
                {
                    ArrayUtils.SetIndexesTo(tmpOutput, outputIndices, 1);
                }
            }
        }
        else
        {
            for (int j = 0; j < GetN() - subLen + 1; j++)
            {
                if (Arrays.AreEqual(searchStr, ArrayUtils.Sub(tmpOutput, ArrayUtils.Range(j, j + subLen))))
                {
                    ArrayUtils.SetRangeTo(tmpOutput, j, j + subLen, 1);
                }
            }
        }
    }

    LOGGER.Debug("raw output:" + Arrays.ToString(
        ArrayUtils.Sub(encoded, ArrayUtils.Range(0, GetN()))));
    LOGGER.Debug("filtered output:" + Arrays.ToString(tmpOutput));

    // ------------------------------------------------------------------------
    // Find each run of 1's.
    int[] nz = ArrayUtils.Where(tmpOutput, x => x > 0);

    // ROBUSTNESS FIX: an all-zero vector previously indexed nz[0] and threw
    // IndexOutOfRangeException; treat it like an empty input instead.
    if (nz.Length == 0)
    {
        return null;
    }

    List<Tuple> runs = new List<Tuple>(); // will be tuples of (startIdx, runLength)
    Array.Sort(nz);
    int[] run = new int[] { nz[0], 1 };
    int i = 1;
    while (i < nz.Length)
    {
        if (nz[i] == run[0] + run[1])
        {
            run[1] += 1; // contiguous: extend the current run
        }
        else
        {
            runs.Add(new Tuple(run[0], run[1]));
            run = new int[] { nz[i], 1 };
        }
        i += 1;
    }
    runs.Add(new Tuple(run[0], run[1]));

    // If we have a periodic encoder, merge the first and last run if they
    // both go all the way to the edges.
    if (IsPeriodic() && runs.Count > 1)
    {
        int l = runs.Count - 1;
        if (((int)runs[0].Get(0)) == 0 && ((int)runs[l].Get(0)) + ((int)runs[l].Get(1)) == GetN())
        {
            runs[l] = new Tuple((int)runs[l].Get(0), ((int)runs[l].Get(1)) + ((int)runs[0].Get(1)));
            runs = runs.SubList(1, runs.Count);
        }
    }

    // ------------------------------------------------------------------------
    // Now, for each group of 1's, determine the "left" and "right" edges, where
    // the "left" edge is inset by halfwidth and the "right" edge is inset by
    // halfwidth.
    // For a group of width w or less, the "left" and "right" edge are both at
    // the center position of the group.
    int left = 0;
    int right = 0;
    List<MinMax> ranges = new List<MinMax>();
    foreach (Tuple tupleRun in runs)
    {
        int start = (int)tupleRun.Get(0);
        int runLen = (int)tupleRun.Get(1);
        if (runLen <= GetW())
        {
            left = right = start + runLen / 2;
        }
        else
        {
            left = start + GetHalfWidth();
            right = start + runLen - 1 - GetHalfWidth();
        }

        double inMin, inMax;
        // Convert to input space.
        if (!IsPeriodic())
        {
            inMin = (left - GetPadding()) * GetResolution() + GetMinVal();
            inMax = (right - GetPadding()) * GetResolution() + GetMinVal();
        }
        else
        {
            inMin = (left - GetPadding()) * GetRange() / GetNInternal() + GetMinVal();
            inMax = (right - GetPadding()) * GetRange() / GetNInternal() + GetMinVal();
        }
        // Handle wrap-around if periodic
        if (IsPeriodic())
        {
            if (inMin >= GetMaxVal())
            {
                inMin -= GetRange();
                inMax -= GetRange();
            }
        }
        // Clip low end
        if (inMin < GetMinVal())
        {
            inMin = GetMinVal();
        }
        if (inMax < GetMinVal())
        {
            inMax = GetMinVal();
        }

        // If we have a periodic encoder, and the max is past the edge, break into
        // 2 separate ranges.
        if (IsPeriodic() && inMax >= GetMaxVal())
        {
            ranges.Add(new MinMax(inMin, GetMaxVal()));
            ranges.Add(new MinMax(GetMinVal(), inMax - GetRange()));
        }
        else
        {
            if (inMax > GetMaxVal())
            {
                inMax = GetMaxVal();
            }
            if (inMin > GetMaxVal())
            {
                inMin = GetMaxVal();
            }
            ranges.Add(new MinMax(inMin, inMax));
        }
    }

    string desc = GenerateRangeDescription(ranges);
    string fieldName;
    // BUG FIX: the original used Java-style "%s.%s" with string.Format, which
    // in .NET emits the literal text "%s.%s" instead of substituting; use
    // {0}.{1} composite-format placeholders. (IsNullOrWhiteSpace already
    // covers the null case, so the extra null check was dropped.)
    if (!string.IsNullOrWhiteSpace(parentFieldName))
    {
        fieldName = string.Format("{0}.{1}", parentFieldName, GetName());
    }
    else
    {
        fieldName = GetName();
    }

    RangeList inner = new RangeList(ranges, desc);
    Map<string, RangeList> fieldsDict = new Map<string, RangeList>();
    fieldsDict.Add(fieldName, inner);

    return new DecodeResult(fieldsDict, new List<string> { fieldName });
}