public void testRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment()
{
    TemporalMemory tm = new TemporalMemory();
    Connections cn = new Connections();
    Parameters p = GetDefaultParameters(null, Parameters.KEY.CELLS_PER_COLUMN, 1);
    p = GetDefaultParameters(p, Parameters.KEY.INITIAL_PERMANENCE, 0.5);
    p = GetDefaultParameters(p, Parameters.KEY.PERMANENCE_INCREMENT, 0.02);
    p = GetDefaultParameters(p, Parameters.KEY.PERMANENCE_DECREMENT, 0.02);
    p.SetParameterByKey(Parameters.KEY.MAX_SEGMENTS_PER_CELL, 2);
    p.Apply(cn);
    TemporalMemory.Init(cn);

    int[] prevActiveColumns1 = { 0, 1, 2 };
    int[] prevActiveColumns2 = { 3, 4, 5 };
    int[] prevActiveColumns3 = { 6, 7, 8 };
    int[] activeColumns = { 9 };

    Cell cell9 = cn.GetCell(9);

    tm.Compute(cn, prevActiveColumns1, true);
    tm.Compute(cn, activeColumns, true);

    Assert.AreEqual(1, cn.GetSegments(cell9).Count);

    DistalDendrite oldestSegment = cn.GetSegments(cell9)[0];

    tm.Reset(cn);
    tm.Compute(cn, prevActiveColumns2, true);
    tm.Compute(cn, activeColumns, true);

    Assert.AreEqual(2, cn.GetSegments(cell9).Count);

    HashSet<Cell> oldPresynaptic = new HashSet<Cell>(cn.GetSynapses(oldestSegment)
        .Select(s => s.GetPresynapticCell()));

    tm.Reset(cn);
    tm.Compute(cn, prevActiveColumns3, true);
    tm.Compute(cn, activeColumns, true);

    Assert.AreEqual(2, cn.GetSegments(cell9).Count);

    // Verify none of the segments are connected to the cells the old
    // segment was connected to.
    foreach (DistalDendrite segment in cn.GetSegments(cell9))
    {
        HashSet<Cell> newPresynaptic = new HashSet<Cell>(cn.GetSynapses(segment)
            .Select(s => s.GetPresynapticCell()));

        Assert.IsFalse(oldPresynaptic.Overlaps(newPresynaptic));
    }
}
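// The recycling tests above and below call a GetDefaultParameters/getDefaultParameters helper
// that is not included in this excerpt. The sketch below is only an illustration of such a helper,
// assuming that parameter keys are accepted by Parameters.Set and that
// Parameters.getAllDefaultParameters() (used in LongerSequenceExperiment further below) supplies
// the baseline configuration. It is not the original implementation.
private static Parameters GetDefaultParameters(Parameters p, string key, object value)
{
    // Start from the library defaults when no parameter set has been created yet.
    Parameters result = p ?? Parameters.getAllDefaultParameters();

    // Override the single requested key and return the parameter set so calls can be chained.
    result.Set(key, value);

    return result;
}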
/// <summary>
/// Runs the SP+TM sequence-learning experiment on the given input values and traces the learned states.
/// </summary>
private void RunExperiment(int inputBits, Parameters p, EncoderBase encoder, List<double> inputValues)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    int maxMatchCnt = 0;
    bool learn = true;

    CortexNetwork net = new CortexNetwork("my cortex");
    List<CortexRegion> regions = new List<CortexRegion>();
    CortexRegion region0 = new CortexRegion("1st Region");
    regions.Add(region0);

    var mem = new Connections();
    p.apply(mem);

    HtmClassifier<string, ComputeCycle> cls = new HtmClassifier<string, ComputeCycle>();

    var numInputs = inputValues.Distinct().ToList().Count;

    TemporalMemory tm1 = new TemporalMemory();

    HomeostaticPlasticityController hpa = new HomeostaticPlasticityController(mem, numInputs * 55, (isStable, numPatterns, actColAvg, seenInputs) =>
    {
        if (isStable)
        {
            // Event should be fired when entering the stable state.
            Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }
        else
        {
            // Ideal SP should never enter the unstable state after the stable state.
            Debug.WriteLine($"INSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }

        Assert.IsTrue(numPatterns == numInputs);

        cls.ClearState();
        tm1.Reset(mem);
    }, numOfCyclesToWaitOnChange: 25);

    SpatialPoolerMT sp1 = new SpatialPoolerMT(hpa);
    sp1.Init(mem, UnitTestHelpers.GetMemory());
    tm1.Init(mem);

    CortexLayer<object, object> layer1 = new CortexLayer<object, object>("L1");
    region0.AddLayer(layer1);
    layer1.HtmModules.Add("encoder", encoder);
    layer1.HtmModules.Add("sp", sp1);
    layer1.HtmModules.Add("tm", tm1);

    double[] inputs = inputValues.ToArray();
    int[] prevActiveCols = new int[0];

    int cycle = 0;
    int matches = 0;

    string lastPredictedValue = "0";

    Dictionary<double, List<List<int>>> activeColumnsLst = new Dictionary<double, List<List<int>>>();

    foreach (var input in inputs)
    {
        if (activeColumnsLst.ContainsKey(input) == false)
        {
            activeColumnsLst.Add(input, new List<List<int>>());
        }
    }

    int maxCycles = 3500;
    int maxPrevInputs = inputValues.Count - 1;
    List<string> previousInputs = new List<string>();
    previousInputs.Add("-1.0");

    //
    // Now training with SP+TM. SP is pretrained on the given input pattern.
    for (int i = 0; i < maxCycles; i++)
    {
        matches = 0;
        cycle++;

        Debug.WriteLine($"-------------- Cycle {cycle} ---------------");

        foreach (var input in inputs)
        {
            Debug.WriteLine($"-------------- {input} ---------------");

            var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
            var activeColumns = layer1.GetResult("sp") as int[];

            activeColumnsLst[input].Add(activeColumns.ToList());

            previousInputs.Add(input.ToString());
            if (previousInputs.Count > maxPrevInputs + 1)
            {
                previousInputs.RemoveAt(0);
            }

            string key = GetKey(previousInputs, input);

            List<Cell> actCells;

            if (lyrOut.ActiveCells.Count == lyrOut.WinnerCells.Count)
            {
                actCells = lyrOut.ActiveCells;
            }
            else
            {
                actCells = lyrOut.WinnerCells;
            }

            cls.Learn(key, actCells.ToArray());

            if (learn == false)
            {
                Debug.WriteLine($"Inference mode");
            }

            Debug.WriteLine($"Col SDR: {Helpers.StringifyVector(lyrOut.ActivColumnIndicies)}");
            Debug.WriteLine($"Cell SDR: {Helpers.StringifyVector(actCells.Select(c => c.Index).ToArray())}");

            if (key == lastPredictedValue)
            {
                matches++;
                Debug.WriteLine($"Match. Actual value: {key} - Predicted value: {lastPredictedValue}");
            }
            else
            {
                Debug.WriteLine($"Mismatch! Actual value: {key} - Predicted value: {lastPredictedValue}");
            }

            if (lyrOut.PredictiveCells.Count > 0)
            {
                var predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());

                Debug.WriteLine($"Current Input: {input} \t| Predicted Input: {predictedInputValue}");

                lastPredictedValue = predictedInputValue;
            }
            else
            {
                Debug.WriteLine($"NO CELLS PREDICTED for next cycle.");
                lastPredictedValue = string.Empty;
            }
        }

        // The brain does not work this way, so we don't reset here.
        // tm1.reset(mem);

        double accuracy = matches / (double)inputs.Length * 100.0;

        Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {inputs.Length}\t {accuracy}%");

        if (accuracy == 100.0)
        {
            maxMatchCnt++;
            Debug.WriteLine($"100% accuracy reached {maxMatchCnt} times.");

            //
            // Experiment is completed when 100% accuracy is reached in 30 consecutive cycles.
            if (maxMatchCnt >= 30)
            {
                sw.Stop();
                Debug.WriteLine($"Exit experiment in the stable state after 30 repeats with 100% accuracy. Elapsed time: {sw.ElapsedMilliseconds / 1000 / 60} min.");
                learn = false;

                //var testInputs = new double[] { 0.0, 2.0, 3.0, 4.0, 5.0, 6.0, 5.0, 4.0, 3.0, 7.0, 1.0, 9.0, 12.0, 11.0, 0.0, 1.0 };

                // C-0, D-1, E-2, F-3, G-4, H-5
                //var testInputs = new double[] { 0.0, 0.0, 4.0, 4.0, 5.0, 5.0, 4.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 0.0 };

                //// Traverse the sequence and check prediction.
                //foreach (var input in inputValues)
                //{
                //    var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
                //    predictedInputValue = cls.GetPredictedInputValue(lyrOut.predictiveCells.ToArray());
                //    Debug.WriteLine($"I={input} - P={predictedInputValue}");
                //}

                /*
                //
                // Here we let the HTM predict the sequence five times on its own.
                // We start with the last predicted value.
                int cnt = 5 * inputValues.Count;

                Debug.WriteLine("---- Start Predicting the Sequence -----");

                //
                // This code snippet starts with some input value and tries to predict all next inputs
                // as they have been learned as a sequence.
                // We take a random value to start somewhere in the sequence.
                var predictedInputValue = inputValues[new Random().Next(0, inputValues.Count - 1)].ToString();

                List<string> predictedValues = new List<string>();

                while (--cnt > 0)
                {
                    //var lyrOut = layer1.Compute(predictedInputValue, learn) as ComputeCycle;
                    var lyrOut = layer1.Compute(double.Parse(predictedInputValue[predictedInputValue.Length - 1].ToString()), false) as ComputeCycle;
                    predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());
                    predictedValues.Add(predictedInputValue);
                };

                // Now we have a sequence of elements and watch in the trace whether it matches the defined input set.
                foreach (var item in predictedValues)
                {
                    Debug.Write(item);
                    Debug.Write(" ,");
                }
                */

                break;
            }
        }
        else if (maxMatchCnt > 0)
        {
            Debug.WriteLine($"After {maxMatchCnt} repeats with 100% accuracy, the accuracy dropped to {accuracy}%. This indicates an unstable state. Learning will be continued.");
            maxMatchCnt = 0;
        }
    }

    Debug.WriteLine("---- cell state trace ----");

    cls.TraceState($"cellState_MinPctOverlDuty-{p[KEY.MIN_PCT_OVERLAP_DUTY_CYCLES]}_MaxBoost-{p[KEY.MAX_BOOST]}.csv");

    Debug.WriteLine("---- Spatial Pooler column state ----");

    foreach (var input in activeColumnsLst)
    {
        using (StreamWriter colSw = new StreamWriter($"ColumState_MinPctOverlDuty-{p[KEY.MIN_PCT_OVERLAP_DUTY_CYCLES]}_MaxBoost-{p[KEY.MAX_BOOST]}_input-{input.Key}.csv"))
        {
            Debug.WriteLine($"------------ {input.Key} ------------");

            foreach (var actCols in input.Value)
            {
                Debug.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
                colSw.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
            }
        }
    }

    Debug.WriteLine("------------ END ------------");
}
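// A usage sketch for the RunExperiment method above. The parameter values, encoder settings, and the
// input sequence are illustrative and modeled on LongerSequenceExperiment below; they are not taken
// from the original caller of this method.
public void RunExperimentUsageSketch()
{
    int inputBits = 1024;

    Parameters p = Parameters.getAllDefaultParameters();
    p.Set(KEY.RANDOM, new ThreadSafeRandom(42));
    p.Set(KEY.INPUT_DIMENSIONS, new int[] { inputBits });
    p.Set(KEY.CELLS_PER_COLUMN, 10);
    p.Set(KEY.COLUMN_DIMENSIONS, new int[] { 2048 });

    // Scalar encoder configured like the one in LongerSequenceExperiment.
    Dictionary<string, object> settings = new Dictionary<string, object>()
    {
        { "W", 21 },
        { "N", inputBits },
        { "Radius", -1.0 },
        { "MinVal", 0.0 },
        { "MaxVal", 20.0 },
        { "Periodic", false },
        { "Name", "scalar" },
        { "ClipInput", false },
    };

    EncoderBase encoder = new ScalarEncoder(settings);

    // A short sequence of scalar values to be learned.
    List<double> inputValues = new List<double>(new double[] { 0.0, 1.0, 2.0, 3.0, 4.0, 5.0 });

    RunExperiment(inputBits, p, encoder, inputValues);
}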
public void LongerSequenceExperiment()
{
    int inputBits = 1024;
    bool learn = true;
    Parameters p = Parameters.getAllDefaultParameters();
    p.Set(KEY.RANDOM, new ThreadSafeRandom(42));
    p.Set(KEY.INPUT_DIMENSIONS, new int[] { inputBits });
    p.Set(KEY.CELLS_PER_COLUMN, 10);
    p.Set(KEY.COLUMN_DIMENSIONS, new int[] { 2048 });

    CortexNetwork net = new CortexNetwork("my cortex");
    List<CortexRegion> regions = new List<CortexRegion>();
    CortexRegion region0 = new CortexRegion("1st Region");
    regions.Add(region0);

    SpatialPoolerMT sp1 = new SpatialPoolerMT();
    TemporalMemory tm1 = new TemporalMemory();
    var mem = new Connections();
    p.apply(mem);
    sp1.Init(mem, UnitTestHelpers.GetMemory());
    tm1.Init(mem);

    Dictionary<string, object> settings = new Dictionary<string, object>()
    {
        { "W", 21 },
        { "N", inputBits },
        { "Radius", -1.0 },
        { "MinVal", 0.0 },
        // { "MaxVal", 20.0 },
        { "Periodic", false },
        { "Name", "scalar" },
        { "ClipInput", false },
    };

    double max = 50;

    List<double> lst = new List<double>();
    for (double i = 0; i < max; i++)
    {
        lst.Add(i);
    }

    settings["MaxVal"] = max;

    EncoderBase encoder = new ScalarEncoder(settings);

    CortexLayer<object, object> layer1 = new CortexLayer<object, object>("L1");

    //
    // NewBorn learning stage.
    region0.AddLayer(layer1);
    layer1.HtmModules.Add("encoder", encoder);
    layer1.HtmModules.Add("sp", sp1);

    HtmClassifier<double, ComputeCycle> cls = new HtmClassifier<double, ComputeCycle>();

    double[] inputs = lst.ToArray();

    //
    // This trains SP.
    foreach (var input in inputs)
    {
        Debug.WriteLine($" ** {input} **");

        for (int i = 0; i < 3; i++)
        {
            var lyrOut = layer1.Compute((object)input, learn) as ComputeCycle;
        }
    }

    // Here we add the TM module to the layer.
    layer1.HtmModules.Add("tm", tm1);

    //
    // Now, training with SP+TM. SP is pretrained on the given input pattern.
    for (int i = 0; i < 200; i++)
    {
        foreach (var input in inputs)
        {
            var lyrOut = layer1.Compute(input, learn) as ComputeCycle;

            cls.Learn(input, lyrOut.ActiveCells.ToArray());

            Debug.WriteLine($"-------------- {input} ---------------");

            if (learn == false)
            {
                Debug.WriteLine($"Inference mode");
            }

            Debug.WriteLine($"W: {Helpers.StringifyVector(lyrOut.WinnerCells.Select(c => c.Index).ToArray())}");
            Debug.WriteLine($"P: {Helpers.StringifyVector(lyrOut.PredictiveCells.Select(c => c.Index).ToArray())}");
            Debug.WriteLine($"Current Input: {input} \t| Predicted Input: {cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray())}");
        }

        if (i == 50)
        {
            Debug.WriteLine("Stop Learning From Here. Entering inference mode.");
            learn = false;
        }

        tm1.Reset(mem);
    }

    cls.TraceState();

    Debug.WriteLine("------------------------------------------------------------------------\n----------------------------------------------------------------------------");
}
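// UnitTestHelpers.GetMemory(), used by the experiments above, is not part of this excerpt.
// Judging by the explicit memory construction in the HtmConfig-based RunExperiment further below,
// it presumably builds a DistributedMemory backed by an in-memory dictionary. The sketch below is
// an assumption, not the original helper.
public static class UnitTestHelpersSketch
{
    public static DistributedMemory GetMemory()
    {
        return new DistributedMemory()
        {
            // Single in-memory partition holding the SP columns.
            ColumnDictionary = new InMemoryDistributedDictionary<int, NeoCortexApi.Entities.Column>(1),
        };
    }
}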
public void TestRecycleLeastRecentlyActiveSegmentToMakeRoomForNewSegment()
{
    TemporalMemory tm = new TemporalMemory();
    Connections cn = new Connections();
    Parameters p = getDefaultParameters(null, KEY.CELLS_PER_COLUMN, 1);
    p = getDefaultParameters(p, KEY.INITIAL_PERMANENCE, 0.5);
    p = getDefaultParameters(p, KEY.PERMANENCE_INCREMENT, 0.02);
    p = getDefaultParameters(p, KEY.PERMANENCE_DECREMENT, 0.02);
    p.Set(KEY.MAX_SEGMENTS_PER_CELL, 2);
    p.apply(cn);
    tm.Init(cn);

    int[] prevActiveColumns1 = { 0, 1, 2 };
    int[] prevActiveColumns2 = { 3, 4, 5 };
    int[] prevActiveColumns3 = { 6, 7, 8 };
    int[] activeColumns = { 9 };

    Cell cell9 = cn.GetCell(9);

    tm.Compute(prevActiveColumns1, true);
    tm.Compute(activeColumns, true);

    Assert.AreEqual(1, cell9.DistalDendrites.Count);

    DistalDendrite oldestSegment = cell9.DistalDendrites[0];

    tm.Reset(cn);
    tm.Compute(prevActiveColumns2, true);
    tm.Compute(activeColumns, true);

    Assert.AreEqual(2, cell9.DistalDendrites.Count);

    var oldPresynaptic = oldestSegment.Synapses.Select(s => s.GetPresynapticCell()).ToList();

    tm.Reset(cn);
    tm.Compute(prevActiveColumns3, true);
    tm.Compute(activeColumns, true);

    Assert.AreEqual(2, cell9.DistalDendrites.Count);

    // Verify none of the segments are connected to the cells the old
    // segment was connected to.
    foreach (DistalDendrite segment in cell9.DistalDendrites)
    {
        var newPresynaptic = segment.Synapses.Select(s => s.GetPresynapticCell()).ToList();

        Assert.IsTrue(areDisjoined<Cell>(oldPresynaptic, newPresynaptic));
    }
}
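// areDisjoined<T>, used in the assertion above, is a helper of the test class that is not shown
// in this excerpt. A minimal sketch under that assumption: two collections are disjoint when they
// share no element.
private static bool areDisjoined<T>(ICollection<T> first, ICollection<T> second)
{
    // True only if no element of the first collection appears in the second one.
    return first.All(item => second.Contains(item) == false);
}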
/// <summary>
/// Runs the SP+TM learning experiment on the given input values and lets the user query a learned element afterwards.
/// </summary>
private static void RunExperiment(int inputBits, HtmConfig cfg, EncoderBase encoder, List<double> inputValues)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    int maxMatchCnt = 0;
    bool learn = true;

    CortexNetwork net = new CortexNetwork("my cortex");
    List<CortexRegion> regions = new List<CortexRegion>();
    CortexRegion region0 = new CortexRegion("1st Region");
    regions.Add(region0);

    var mem = new Connections(cfg);

    bool isInStableState;

    HtmClassifier<string, ComputeCycle> cls = new HtmClassifier<string, ComputeCycle>();

    var numInputs = inputValues.Distinct<double>().ToList().Count;

    TemporalMemory tm1 = new TemporalMemory();

    HomeostaticPlasticityController hpa = new HomeostaticPlasticityController(mem, numInputs * 55, (isStable, numPatterns, actColAvg, seenInputs) =>
    {
        if (isStable)
        {
            // Event should be fired when entering the stable state.
            Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }
        else
        {
            // Ideal SP should never enter the unstable state after the stable state.
            Debug.WriteLine($"INSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }

        if (numPatterns != numInputs)
        {
            throw new InvalidOperationException("Stable state must observe all input patterns");
        }

        isInStableState = true;

        cls.ClearState();
        tm1.Reset(mem);
    }, numOfCyclesToWaitOnChange: 25);

    SpatialPoolerMT sp1 = new SpatialPoolerMT(hpa);

    sp1.Init(mem, new DistributedMemory()
    {
        ColumnDictionary = new InMemoryDistributedDictionary<int, NeoCortexApi.Entities.Column>(1),
    });

    tm1.Init(mem);

    CortexLayer<object, object> layer1 = new CortexLayer<object, object>("L1");
    region0.AddLayer(layer1);
    layer1.HtmModules.Add("encoder", encoder);
    layer1.HtmModules.Add("sp", sp1);
    layer1.HtmModules.Add("tm", tm1);

    double[] inputs = inputValues.ToArray();
    int[] prevActiveCols = new int[0];

    int cycle = 0;
    int matches = 0;

    string lastPredictedValue = "0";
    String prediction = null;

    Dictionary<double, List<List<int>>> activeColumnsLst = new Dictionary<double, List<List<int>>>();

    foreach (var input in inputs)
    {
        if (activeColumnsLst.ContainsKey(input) == false)
        {
            activeColumnsLst.Add(input, new List<List<int>>());
        }
    }

    int maxCycles = 3500;
    int maxPrevInputs = inputValues.Count - 1;
    List<string> previousInputs = new List<string>();
    previousInputs.Add("-1.0");

    //
    // Now training with SP+TM. SP is pretrained on the given input pattern.
    for (int i = 0; i < maxCycles; i++)
    {
        matches = 0;
        cycle++;

        Debug.WriteLine($"-------------- Cycle {cycle} ---------------");

        foreach (var input in inputs)
        {
            Debug.WriteLine($"-------------- {input} ---------------");

            var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
            var activeColumns = layer1.GetResult("sp") as int[];

            activeColumnsLst[input].Add(activeColumns.ToList());

            previousInputs.Add(input.ToString());
            if (previousInputs.Count > (maxPrevInputs + 1))
            {
                previousInputs.RemoveAt(0);
            }

            string key = GetKey(previousInputs, input);

            cls.Learn(key, lyrOut.ActiveCells.ToArray());

            if (learn == false)
            {
                Debug.WriteLine($"Inference mode");
            }

            Debug.WriteLine($"Col SDR: {Helpers.StringifyVector(lyrOut.ActivColumnIndicies)}");
            Debug.WriteLine($"Cell SDR: {Helpers.StringifyVector(lyrOut.ActiveCells.Select(c => c.Index).ToArray())}");

            if (key == lastPredictedValue)
            {
                matches++;
                Debug.WriteLine($"Match. Actual value: {key} - Predicted value: {lastPredictedValue}");
            }
            else
            {
                Debug.WriteLine($"Mismatch! Actual value: {key} - Predicted value: {lastPredictedValue}");
            }

            if (lyrOut.PredictiveCells.Count > 0)
            {
                var predictedInputValue = cls.GetPredictedInputValues(lyrOut.PredictiveCells.ToArray(), 3);

                Debug.WriteLine($"Current Input: {input}");
                Debug.WriteLine("Predictions with a similarity of at least 50%:");

                foreach (var t in predictedInputValue)
                {
                    if (t.Similarity >= 50.0)
                    {
                        Debug.WriteLine($"Predicted Input: {string.Join(", ", t.PredictedInput)},\tSimilarity Percentage: {string.Join(", ", t.Similarity)}, \tNumber of Same Bits: {string.Join(", ", t.NumOfSameBits)}");
                    }
                }

                lastPredictedValue = predictedInputValue.First().PredictedInput;
            }
            else
            {
                Debug.WriteLine($"NO CELLS PREDICTED for next cycle.");
                lastPredictedValue = String.Empty;
            }
        }

        double accuracy = (double)matches / (double)inputs.Length * 100.0;

        Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {inputs.Length}\t {accuracy}%");

        if (accuracy == 100.0)
        {
            maxMatchCnt++;
            Debug.WriteLine($"100% accuracy reached {maxMatchCnt} times.");

            if (maxMatchCnt >= 30)
            {
                sw.Stop();
                Debug.WriteLine($"Exit experiment in the stable state after 30 repeats with 100% accuracy. Elapsed time: {sw.ElapsedMilliseconds / 1000 / 60} min.");
                learn = false;
                break;
            }
        }
        else if (maxMatchCnt > 0)
        {
            Debug.WriteLine($"After {maxMatchCnt} repeats with 100% accuracy, the accuracy dropped to {accuracy}%. This indicates an unstable state. Learning will be continued.");
            maxMatchCnt = 0;
        }
    }

    Debug.WriteLine("---- cell state trace ----");

    cls.TraceState($"cellState_MinPctOverlDuty-{cfg.MinPctOverlapDutyCycles}_MaxBoost-{cfg.MaxBoost}.csv");

    Debug.WriteLine("---- Spatial Pooler column state ----");

    foreach (var input in activeColumnsLst)
    {
        using (StreamWriter colSw = new StreamWriter($"ColumState_MinPctOverlDuty-{cfg.MinPctOverlapDutyCycles}_MaxBoost-{cfg.MaxBoost}_input-{input.Key}.csv"))
        {
            Debug.WriteLine($"------------ {input.Key} ------------");

            foreach (var actCols in input.Value)
            {
                Debug.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
                colSw.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
            }
        }
    }

    Debug.WriteLine("------------ END ------------");

    Console.WriteLine("\n Please enter a number that has been learnt");
    int inputNumber = Convert.ToInt16(Console.ReadLine());
    Inference(inputNumber, false, layer1, cls);
}
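// GetKey, used above to label the input presented to the classifier, is not included in this excerpt.
// The sketch below is an assumption of how such a key could be built: it chains the recent input
// history so that the classifier learns context-dependent labels rather than single values. The
// sequence-learning experiment further below uses an analogous overload that additionally prefixes
// the sequence name.
private static string GetKey(List<string> prevInputs, double input)
{
    string key = string.Empty;

    // Chain the previously seen inputs; the current input is already the last list element,
    // so the parameter is kept only to mirror the call sites above.
    for (int i = 0; i < prevInputs.Count; i++)
    {
        if (i > 0)
            key += "-";

        key += prevInputs[i];
    }

    return key;
}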
/// <summary>
/// Trains the SP in the newborn stage, then learns all given sequences with SP+TM and returns the trained prediction engine.
/// </summary>
private HtmPredictionEngine RunExperiment(int inputBits, HtmConfig cfg, EncoderBase encoder, Dictionary<string, List<double>> sequences)
{
    Stopwatch sw = new Stopwatch();
    sw.Start();

    int maxMatchCnt = 0;

    var mem = new Connections(cfg);

    bool isInStableState = false;

    HtmClassifier<string, ComputeCycle> cls = new HtmClassifier<string, ComputeCycle>();

    var numUniqueInputs = GetNumberOfInputs(sequences);

    CortexLayer<object, object> layer1 = new CortexLayer<object, object>("L1");

    TemporalMemory tm = new TemporalMemory();

    // For more information see the following paper: https://www.scitepress.org/Papers/2021/103142/103142.pdf
    HomeostaticPlasticityController hpc = new HomeostaticPlasticityController(mem, numUniqueInputs * 150, (isStable, numPatterns, actColAvg, seenInputs) =>
    {
        if (isStable)
        {
            // Event should be fired when entering the stable state.
            Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }
        else
        {
            // Ideal SP should never enter the unstable state after the stable state.
            Debug.WriteLine($"INSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
        }

        // We are not learning in the unstable state.
        isInStableState = isStable;

        // Clear active and predictive cells.
        //tm.Reset(mem);
    }, numOfCyclesToWaitOnChange: 50);

    SpatialPoolerMT sp = new SpatialPoolerMT(hpc);
    sp.Init(mem);
    tm.Init(mem);

    // Please note that we do not add the TM to the layer here.
    // This is omitted for practical reasons: we first enter the newborn stage of the algorithm,
    // in which the SP gets boosted and sees all elements before learning with the TM starts.
    // Everything would also work with the TM in the layer, but it would be much slower.
    // So, to speed up the experiment, we omit the TM here and add it to the layer after the newborn stage.
    layer1.HtmModules.Add("encoder", encoder);
    layer1.HtmModules.Add("sp", sp);

    //double[] inputs = inputValues.ToArray();
    int[] prevActiveCols = new int[0];

    int cycle = 0;
    int matches = 0;

    var lastPredictedValues = new List<string>(new string[] { "0" });

    int maxCycles = 3500;

    //
    // Training SP to get stable. New-born stage.
    //
    for (int i = 0; i < maxCycles && isInStableState == false; i++)
    {
        matches = 0;
        cycle++;

        Debug.WriteLine($"-------------- Newborn Cycle {cycle} ---------------");

        foreach (var inputs in sequences)
        {
            foreach (var input in inputs.Value)
            {
                Debug.WriteLine($" -- {inputs.Key} - {input} --");

                var lyrOut = layer1.Compute(input, true);

                if (isInStableState)
                    break;
            }

            if (isInStableState)
                break;
        }
    }

    // Clear all learned patterns in the classifier.
    cls.ClearState();

    // We activate here the Temporal Memory algorithm.
    layer1.HtmModules.Add("tm", tm);

    //
    // Loop over all sequences.
    foreach (var sequenceKeyPair in sequences)
    {
        Debug.WriteLine($"-------------- Sequences {sequenceKeyPair.Key} ---------------");

        int maxPrevInputs = sequenceKeyPair.Value.Count - 1;

        List<string> previousInputs = new List<string>();

        previousInputs.Add("-1.0");

        //
        // Now training with SP+TM. SP is pretrained on the given input pattern set.
        for (int i = 0; i < maxCycles; i++)
        {
            matches = 0;
            cycle++;

            Debug.WriteLine("");
            Debug.WriteLine($"-------------- Cycle {cycle} ---------------");
            Debug.WriteLine("");

            foreach (var input in sequenceKeyPair.Value)
            {
                Debug.WriteLine($"-------------- {input} ---------------");

                var lyrOut = layer1.Compute(input, true) as ComputeCycle;

                var activeColumns = layer1.GetResult("sp") as int[];

                previousInputs.Add(input.ToString());
                if (previousInputs.Count > (maxPrevInputs + 1))
                    previousInputs.RemoveAt(0);

                // With an SP pretrained by the HPC, the TM quickly learns cells for patterns.
                // In that case the starting subsequence 4-5-6 might produce the same SDR as 1-2-3-4-5-6,
                // which would result in returning 4-5-6 instead of 1-2-3-4-5-6.
                // HtmClassifier always returns the first matching sequence. Because 4-5-6 is memorized
                // first, it would match as the first one.
                if (previousInputs.Count < maxPrevInputs)
                    continue;

                string key = GetKey(previousInputs, input, sequenceKeyPair.Key);

                List<Cell> actCells;

                if (lyrOut.ActiveCells.Count == lyrOut.WinnerCells.Count)
                {
                    actCells = lyrOut.ActiveCells;
                }
                else
                {
                    actCells = lyrOut.WinnerCells;
                }

                cls.Learn(key, actCells.ToArray());

                Debug.WriteLine($"Col SDR: {Helpers.StringifyVector(lyrOut.ActivColumnIndicies)}");
                Debug.WriteLine($"Cell SDR: {Helpers.StringifyVector(actCells.Select(c => c.Index).ToArray())}");

                //
                // If the list of predicted values from the previous step contains the currently presented value,
                // we have a match.
                if (lastPredictedValues.Contains(key))
                {
                    matches++;
                    Debug.WriteLine($"Match. Actual value: {key} - Predicted value: {lastPredictedValues.FirstOrDefault(key)}.");
                }
                else
                {
                    Debug.WriteLine($"Mismatch! Actual value: {key} - Predicted values: {String.Join(',', lastPredictedValues)}");
                }

                if (lyrOut.PredictiveCells.Count > 0)
                {
                    //var predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());
                    var predictedInputValues = cls.GetPredictedInputValues(lyrOut.PredictiveCells.ToArray(), 3);

                    foreach (var item in predictedInputValues)
                    {
                        Debug.WriteLine($"Current Input: {input} \t| Predicted Input: {item.PredictedInput} - {item.Similarity}");
                    }

                    lastPredictedValues = predictedInputValues.Select(v => v.PredictedInput).ToList();
                }
                else
                {
                    Debug.WriteLine($"NO CELLS PREDICTED for next cycle.");
                    lastPredictedValues = new List<string>();
                }
            }

            // The first element (a single element) in the sequence cannot be predicted.
            double maxPossibleAccuraccy = (double)((double)sequenceKeyPair.Value.Count - 1) / (double)sequenceKeyPair.Value.Count * 100.0;

            double accuracy = (double)matches / (double)sequenceKeyPair.Value.Count * 100.0;

            Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {sequenceKeyPair.Value.Count}\t {accuracy}%");

            if (accuracy >= maxPossibleAccuraccy)
            {
                maxMatchCnt++;
                Debug.WriteLine($"100% accuracy reached {maxMatchCnt} times.");

                //
                // Experiment is completed if we stay at the maximum possible accuracy for 30 cycles.
                if (maxMatchCnt >= 30)
                {
                    sw.Stop();
                    Debug.WriteLine($"Sequence learned. The algorithm is in the stable state after 30 repeats with accuracy {accuracy} of the maximum possible {maxPossibleAccuraccy}. Elapsed sequence {sequenceKeyPair.Key} learning time: {sw.Elapsed}.");
                    break;
                }
            }
            else if (maxMatchCnt > 0)
            {
                Debug.WriteLine($"After {maxMatchCnt} repeats with 100% accuracy, the accuracy dropped to {accuracy}%. This indicates an unstable state. Learning will be continued.");
                maxMatchCnt = 0;
            }

            // This resets the learned state, so the first element always starts from the beginning.
            tm.Reset(mem);
        }
    }

    Debug.WriteLine("------------ END ------------");

    return new HtmPredictionEngine
    {
        Layer = layer1,
        Classifier = cls,
        Connections = mem
    };
}
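// A usage sketch for the HtmPredictionEngine returned above. Only Layer.Compute and
// Classifier.GetPredictedInputValues are taken from the code above; the method itself is
// illustrative and assumes that the Layer and Classifier properties expose the CortexLayer
// and HtmClassifier instances created in RunExperiment.
public void PredictNextElementSketch(HtmPredictionEngine engine, double input)
{
    // Present a single element to the trained layer without further learning.
    var lyrOut = engine.Layer.Compute(input, false) as ComputeCycle;

    // Ask the classifier for up to three candidate predictions of the next element.
    var predictions = engine.Classifier.GetPredictedInputValues(lyrOut.PredictiveCells.ToArray(), 3);

    foreach (var prediction in predictions)
    {
        Debug.WriteLine($"Input: {input} \t| Predicted next: {prediction.PredictedInput} - {prediction.Similarity}");
    }
}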