Example #1
        public void SpatialSequenceLearningExperiment()
        {
            var parameters = GetDefaultParams();

            parameters.Set(KEY.POTENTIAL_RADIUS, 64 * 64);
            parameters.Set(KEY.POTENTIAL_PCT, 1.0);
            parameters.Set(KEY.GLOBAL_INHIBITION, false);
            parameters.Set(KEY.STIMULUS_THRESHOLD, 0.5);
            parameters.Set(KEY.INHIBITION_RADIUS, (int)(0.25 * 64 * 64)); // parentheses so the cast applies to the product, not only to 0.25
            parameters.Set(KEY.LOCAL_AREA_DENSITY, -1);
            parameters.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.1 * 64 * 64);
            parameters.Set(KEY.DUTY_CYCLE_PERIOD, 1000000);
            parameters.Set(KEY.MAX_BOOST, 5);

            parameters.setInputDimensions(new int[] { 32, 32 });
            parameters.setColumnDimensions(new int[] { 64, 64 });
            parameters.setNumActiveColumnsPerInhArea(0.02 * 64 * 64);
            var sp  = new SpatialPoolerMT();
            var mem = new Connections();

            double[] inputSequence = new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0 };

            var inputVectors = GetEncodedSequence(inputSequence, 0.0, 100.0);

            parameters.apply(mem);

            sp.Init(mem, UnitTestHelpers.GetMemory());

            foreach (var inputVector in inputVectors)
            {
                for (int i = 0; i < 3; i++)
                {
                    var activeIndices = sp.Compute(inputVector, true, true) as int[];  // active column indices
                    var activeArray   = sp.Compute(inputVector, true, false) as int[]; // dense activation array

                    Debug.WriteLine(Helpers.StringifyVector(activeArray));
                    Debug.WriteLine(Helpers.StringifyVector(activeIndices));
                }
            }
        }
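The experiment above calls a GetEncodedSequence helper that is not part of the snippet. The sketch below shows one plausible shape for it, assuming a ScalarEncoder whose output length matches the 32x32 input dimensions; the W value and the helper body are illustrative assumptions, not the original implementation.

        // Illustrative sketch only: encodes each scalar of the sequence into a 32x32 = 1024 bit vector.
        private List<int[]> GetEncodedSequence(double[] inputSequence, double min, double max)
        {
            var sdrs = new List<int[]>();

            var settings = new Dictionary<string, object>()
            {
                { "W", 21 },          // number of active bits per encoded value (assumed)
                { "N", 32 * 32 },     // must match the input dimensions used above
                { "MinVal", min },
                { "MaxVal", max },
                { "Periodic", false },
                { "Name", "scalar" },
                { "ClipInput", false },
            };

            var encoder = new ScalarEncoder(settings);

            foreach (var value in inputSequence)
            {
                sdrs.Add(encoder.Encode(value));
            }

            return sdrs;
        }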
        public void MyTestMethod()
        {
            //DIRECTORIES TO STORE INPUT AND OUTPUT FILES OF THE EXPERIMENT

            //Encoder
            string E_inFolder = "NoiseExperiments/Input"; //Encoder's raw input file directory
            //Directory.CreateDirectory(E_inFolder);

            //----------------INPUT FILE PATH-----------------
            string E_inFile_train = $"{E_inFolder}/sinusoidal.csv";                  //Encoder's input file in "Training mode"
            //<Robustness>
            string E_inFile_robustness = $"{E_inFolder}/Noisy_N-0-2_sinusoidal.csv"; // Encoder's input file in "Testing mode - Robustness" - All the files with name of the form "Noisy_*.csv"
            //</Robustness>
            //< Specificity >
            string E_inFile_specificity = $"{E_inFolder}/sinusoidal-specificity.csv"; // Encoder's input file in "Testing mode - Specificity"
            //</ Specificity >
            //------------------------------------------------

            string E_outFolder = "NoiseExperiments/MyEncoderOutput"; //Encoder's graphical output (PNG format) during "Testing mode" will be created here

            Directory.CreateDirectory(E_outFolder);
            string E_outFolder_train = $"{E_outFolder}/train"; // Encoder's graphical output (PNG format) during "Training mode" will be created here

            Directory.CreateDirectory(E_outFolder_train);

            //Spatial Pooler
            string SP_inFolder = "NoiseExperiments/MySPInput"; //Spatial Pooler's input file (Encoder's output (CSV format)) directory

            Directory.CreateDirectory(SP_inFolder);
            string SP_inFile_train       = $"{SP_inFolder}/MyEncoderOut_train.csv";       //Spatial Pooler's input file during "Training mode"
            string SP_inFile_robustness  = $"{SP_inFolder}/MyEncoderOut_robustness.csv";  //Spatial Pooler's input file during "Testing mode - robustness"
            string SP_inFile_specificity = $"{SP_inFolder}/MyEncoderOut_specificity.csv"; //Spatial Pooler's input file during "Testing mode - specificity"
            string SP_outFolder          = "MySPOutput";                                  //Spatial Pooler's graphical output (PNG format) will be stored here

            Directory.CreateDirectory(SP_outFolder);

            string SP_outFolder_compare = $"{SP_outFolder}/compare_445"; //This folder contains CSV files showing the Hamming distance between the Spatial Pooler's output in "Training mode" and in "Testing mode"

            Directory.CreateDirectory(SP_outFolder_compare);

            //--------------------OUTPUT FILE PATH-------------------------
            //<Robustness>
            string SP_outFile_robustness = $"{SP_outFolder_compare}/compare_N-0-2.csv"; //The final output file in "Robustness test"
            //</Robustness>
            //<Specificity>
            string SP_outFile_specificity = $"{SP_outFolder_compare}/compare_specificity.csv"; //The final output file in "Specificity test"
            //</Specificity>
            //-------------------------------------------------------------


            //-------------------------------------------------------
            //|                    HTM PARAMETERS                   |
            //-------------------------------------------------------

            const int E_outBits     = 445;  //Number of Scalar Encoder's output bits
            const int columnsNumber = 2048; //Number of Spatial Pooler's output columns

            Parameters p = Parameters.getAllDefaultParameters();

            p.Set(KEY.RANDOM, new ThreadSafeRandom(42));

            //------------------SPATIAL POOLER PARAMETERS-----------------
            p.Set(KEY.INPUT_DIMENSIONS, new int[] { E_outBits });
            p.Set(KEY.POTENTIAL_RADIUS, -1);
            p.Set(KEY.POTENTIAL_PCT, 1);
            p.Set(KEY.GLOBAL_INHIBITION, true);
            p.Set(KEY.INHIBITION_RADIUS, 15);

            //Leave it
            p.Set(KEY.LOCAL_AREA_DENSITY, -1.0);
            p.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.02 * columnsNumber);

            p.Set(KEY.STIMULUS_THRESHOLD, 0.5);
            p.Set(KEY.SYN_PERM_INACTIVE_DEC, 0.008);
            p.Set(KEY.SYN_PERM_ACTIVE_INC, 0.01);
            p.Set(KEY.SYN_PERM_CONNECTED, 0.10);

            //Leave it
            p.Set(KEY.SYN_PERM_BELOW_STIMULUS_INC, 0.01);
            p.Set(KEY.SYN_PERM_TRIM_THRESHOLD, 0.05);
            p.Set(KEY.MIN_PCT_OVERLAP_DUTY_CYCLES, 0.001);
            p.Set(KEY.MIN_PCT_ACTIVE_DUTY_CYCLES, 0.001);

            p.Set(KEY.DUTY_CYCLE_PERIOD, 100);
            p.Set(KEY.MAX_BOOST, 10 /*10.0*/);
            p.Set(KEY.WRAP_AROUND, true);
            //p.Set(KEY.LEARN, true);


            //-------------------TEMPORAL MEMORY PARAMETERS----------------
            p.Set(KEY.COLUMN_DIMENSIONS, new int[] { columnsNumber });
            p.Set(KEY.CELLS_PER_COLUMN, 32);
            p.Set(KEY.ACTIVATION_THRESHOLD, 10);
            p.Set(KEY.LEARNING_RADIUS, 10);
            p.Set(KEY.MIN_THRESHOLD, 9);
            p.Set(KEY.MAX_NEW_SYNAPSE_COUNT, 20);
            p.Set(KEY.MAX_SYNAPSES_PER_SEGMENT, 225);
            p.Set(KEY.MAX_SEGMENTS_PER_CELL, 225);
            p.Set(KEY.INITIAL_PERMANENCE, 0.21);
            p.Set(KEY.CONNECTED_PERMANENCE, 0.5);
            p.Set(KEY.PERMANENCE_INCREMENT, 0.10);
            p.Set(KEY.PERMANENCE_DECREMENT, 0.10);
            p.Set(KEY.PREDICTED_SEGMENT_DECREMENT, 0.1);
            //p.Set(KEY.LEARN, true);

            //---------------------------------------------------
            //|                    UNIT TEST                    |
            //---------------------------------------------------

            //Initializing HTM modules
            SpatialPoolerMT sp1 = new SpatialPoolerMT();
            TemporalMemory  tm1 = new TemporalMemory();
            var             mem = new Connections();

            p.apply(mem);
            sp1.Init(mem, UnitTestHelpers.GetMemory());
            tm1.Init(mem);
            HtmClassifier <double, ComputeCycle> cls = new HtmClassifier <double, ComputeCycle>();



            //-------------------ENCODING INPUTS----------------------
            Encoding(E_inFile_train, SP_inFile_train, E_outFolder_train, E_outBits);



            //--------------------TRAINING MODE---------------------

            //SP training
            for (int j = 0; j < 5; j++)
            {
                using (StreamReader sr = new StreamReader(SP_inFile_train))
                {
                    string line;
                    while ((line = sr.ReadLine()) != null)
                    {
                        string[] tokens   = line.Split(",");
                        int[]    SP_input = new int[E_outBits];
                        for (int i = 0; i < E_outBits; i++)
                        {
                            if (tokens[i + 1] == "0")
                            {
                                SP_input[i] = 0;
                            }
                            else
                            {
                                SP_input[i] = 1;
                            }
                        }
                        for (int i = 0; i < 3; i++)
                        {
                            sp1.Compute(SP_input, true);
                        }
                    }
                }
            }

            Debug.WriteLine("-----------------------------------------------------");
            Debug.WriteLine("|-----------------FINISHED TRAINING-----------------|");
            Debug.WriteLine("-----------------------------------------------------");

            //TM + SP training
            double lastPredictedValue = 0.0;

            for (int j = 0; j < 20; j++)
            {
                using (StreamReader sr = new StreamReader(SP_inFile_train))
                {
                    string line;
                    while ((line = sr.ReadLine()) != null)
                    {
                        string[] tokens   = line.Split(",");
                        int[]    SP_input = new int[E_outBits];
                        for (int i = 0; i < E_outBits; i++)
                        {
                            if (tokens[i + 1] == "0")
                            {
                                SP_input[i] = 0;
                            }
                            else
                            {
                                SP_input[i] = 1;
                            }
                        }
                        var    SP_res = sp1.Compute(SP_input, true);
                        var    TM_res = tm1.Compute(SP_res, true) as ComputeCycle;
                        double input  = Convert.ToDouble(tokens[0], CultureInfo.InvariantCulture);
                        Debug.WriteLine($"Input: {input} - Predicted: {lastPredictedValue}");
                        //cls.Learn(input, TM_res.ActiveCells.ToArray(), TM_res.PredictiveCells.ToArray());
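                        // Note: with the Learn call above commented out, the classifier never learns any
                        // association, so GetPredictedInputValue below can only return its default value.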
                        lastPredictedValue = cls.GetPredictedInputValue(TM_res.PredictiveCells.ToArray());
                    }
                }
            }
        }
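MyTestMethod depends on an Encoding helper (called above for the training file) that is not shown in the snippet. The sketch below illustrates one plausible implementation; the W value and the MinVal/MaxVal range of the sinusoidal input are assumptions. It writes lines of the form "value,bit1,bit2,...", which is exactly the layout the SP training loops above parse (tokens[0] is the raw value, tokens[1..] are the encoder bits).

        // Illustrative sketch of the Encoding helper; the original implementation is not shown here.
        private static void Encoding(string rawInputFile, string encodedOutputFile, string graphicalOutputFolder, int outBits)
        {
            var settings = new Dictionary<string, object>()
            {
                { "W", 21 },          // active bits per encoded value (assumed)
                { "N", outBits },
                { "MinVal", -1.0 },   // assumed value range of the sinusoidal input
                { "MaxVal", 1.0 },
                { "Periodic", false },
                { "Name", "scalar" },
                { "ClipInput", false },
            };

            var encoder = new ScalarEncoder(settings);

            using (var reader = new StreamReader(rawInputFile))
            using (var writer = new StreamWriter(encodedOutputFile))
            {
                string line;
                while ((line = reader.ReadLine()) != null)
                {
                    double value = Convert.ToDouble(line, CultureInfo.InvariantCulture);
                    int[] sdr = encoder.Encode(value);

                    // One CSV row per value: the raw value followed by all encoder bits.
                    writer.WriteLine($"{value.ToString(CultureInfo.InvariantCulture)},{string.Join(",", sdr)}");

                    // The original helper also renders the encoder output as PNG images into
                    // graphicalOutputFolder; that part is omitted in this sketch.
                }
            }
        }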
Example #3
        public void TestCortexLayer()
        {
            int inputBits = 100;

            double max        = 20;
            int    numColumns = 2048;

            List <double> inputValues = new List <double>(new double[] { 0.0, 1.0, 0.0, 2.0, 3.0, 4.0, 5.0, 6.0, 5.0, 4.0, 3.0, 7.0, 1.0, 9.0, 12.0, 11.0, 12.0, 13.0, 14.0, 11.0, 12.0, 14.0, 5.0, 7.0, 6.0, 9.0, 3.0, 4.0, 3.0, 4.0, 3.0, 4.0 });
            int           numInputs   = inputValues.Distinct().ToList().Count;

            var inputs = inputValues.ToArray();

            Dictionary <string, object> settings = new Dictionary <string, object>()
            {
                { "W", 15 },
                { "N", inputBits },
                { "Radius", -1.0 },
                { "MinVal", 0.0 },
                { "Periodic", false },
                { "Name", "scalar" },
                { "ClipInput", false },
                { "MaxVal", max }
            };

            EncoderBase encoder = new ScalarEncoder(settings);

            HtmConfig htmConfig = new HtmConfig(new int[] { inputBits }, new int[] { numColumns })
            {
                Random                     = new ThreadSafeRandom(42),
                CellsPerColumn             = 25,
                GlobalInhibition           = true,
                LocalAreaDensity           = -1,
                NumActiveColumnsPerInhArea = 0.02 * numColumns,
                PotentialRadius            = 50,
                InhibitionRadius           = 15,
                MaxBoost                   = 10.0,
                DutyCyclePeriod            = 25,
                MinPctOverlapDutyCycles    = 0.75,
                MaxNewSynapseCount         = (int)(0.02 * numColumns),
                ActivationThreshold        = 15,
                ConnectedPermanence        = 0.5,
                PermanenceDecrement        = 0.25,
                PermanenceIncrement        = 0.15,
                PredictedSegmentDecrement  = 0.1
            };

            Connections memory = new Connections(htmConfig);

            HomeostaticPlasticityController hpa = new HomeostaticPlasticityController(memory, numInputs * 55, (isStable, numPatterns, actColAvg, seenInputs) =>
            {
                if (isStable)
                {
                    // Event should be fired when entering the stable state.
                    Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
                }
                else
                {
                    // An ideal SP should never become unstable again once it has reached the stable state.
                    Debug.WriteLine($"UNSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
                }
            }, numOfCyclesToWaitOnChange: 25);

            SpatialPoolerMT spatialPooler = new SpatialPoolerMT(hpa);

            spatialPooler.Init(memory, UnitTestHelpers.GetMemory());

            TemporalMemory temporalMemory = new TemporalMemory();

            temporalMemory.Init(memory);

            List <CortexRegion> regions = new List <CortexRegion>();
            CortexRegion        region0 = new CortexRegion("1st Region");

            regions.Add(region0);

            CortexLayer <object, object> layer1 = new CortexLayer <object, object>("L1");

            region0.AddLayer(layer1);
            layer1.HtmModules.Add("encoder", encoder);
            layer1.HtmModules.Add("sp", spatialPooler);
            layer1.HtmModules.Add("tm", temporalMemory);

            bool learn = true;

            int maxCycles = 3500;

            for (int i = 0; i < maxCycles; i++)
            {
                foreach (var input in inputs)
                {
                    var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
                }
            }
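            // Illustrative addition, not part of the original test: after training, the intermediate
            // result of an individual module can be read back from the layer, e.g. the active columns
            // produced by the spatial pooler registered above under the key "sp".
            foreach (var input in inputs)
            {
                var lyrOut = layer1.Compute(input, false) as ComputeCycle;

                var activeColumns = layer1.GetResult("sp") as int[];

                Debug.WriteLine($"Input: {input} - active columns: {Helpers.StringifyVector(activeColumns)}");
                Debug.WriteLine($"Predictive cells: {lyrOut.PredictiveCells.Count}");
            }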
        }
Example #4
        /// <summary>
        /// Runs the SP+TM sequence learning experiment on the given input values and traces prediction accuracy until it remains at 100% for 30 cycles.
        /// </summary>
        private void RunExperiment(int inputBits, Parameters p, EncoderBase encoder, List <double> inputValues)
        {
            Stopwatch sw = new Stopwatch();

            sw.Start();

            int  maxMatchCnt = 0;
            bool learn       = true;

            CortexNetwork       net     = new CortexNetwork("my cortex");
            List <CortexRegion> regions = new List <CortexRegion>();
            CortexRegion        region0 = new CortexRegion("1st Region");

            regions.Add(region0);

            var mem = new Connections();

            p.apply(mem);

            //bool isInStableState = false;

            //HtmClassifier<double, ComputeCycle> cls = new HtmClassifier<double, ComputeCycle>();
            HtmClassifier <string, ComputeCycle> cls = new HtmClassifier <string, ComputeCycle>();

            var numInputs = inputValues.Distinct().ToList().Count;

            TemporalMemory tm1 = new TemporalMemory();

            HomeostaticPlasticityController hpa = new HomeostaticPlasticityController(mem, numInputs * 55, (isStable, numPatterns, actColAvg, seenInputs) =>
            {
                if (isStable)
                {
                    // Event should be fired when entering the stable state.
                    Debug.WriteLine($"STABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
                }
                else
                {
                    // An ideal SP should never become unstable again once it has reached the stable state.
                    Debug.WriteLine($"UNSTABLE: Patterns: {numPatterns}, Inputs: {seenInputs}, iteration: {seenInputs / numPatterns}");
                }

                Assert.IsTrue(numPatterns == numInputs);
                //isInStableState = true;
                cls.ClearState();

                tm1.Reset(mem);
            }, numOfCyclesToWaitOnChange: 25);


            SpatialPoolerMT sp1 = new SpatialPoolerMT(hpa);

            sp1.Init(mem, UnitTestHelpers.GetMemory());
            tm1.Init(mem);

            CortexLayer <object, object> layer1 = new CortexLayer <object, object>("L1");

            region0.AddLayer(layer1);
            layer1.HtmModules.Add("encoder", encoder);
            layer1.HtmModules.Add("sp", sp1);
            layer1.HtmModules.Add("tm", tm1);

            double[] inputs         = inputValues.ToArray();
            int[]    prevActiveCols = new int[0];

            int cycle   = 0;
            int matches = 0;

            string lastPredictedValue = "0";

            Dictionary <double, List <List <int> > > activeColumnsLst = new Dictionary <double, List <List <int> > >();

            foreach (var input in inputs)
            {
                if (activeColumnsLst.ContainsKey(input) == false)
                {
                    activeColumnsLst.Add(input, new List <List <int> >());
                }
            }

            int           maxCycles      = 3500;
            int           maxPrevInputs  = inputValues.Count - 1;
            List <string> previousInputs = new List <string>();

            previousInputs.Add("-1.0");

            //
            // Now training with SP+TM. SP is pretrained on the given input pattern.
            for (int i = 0; i < maxCycles; i++)
            {
                matches = 0;

                cycle++;

                Debug.WriteLine($"-------------- Cycle {cycle} ---------------");

                foreach (var input in inputs)
                {
                    Debug.WriteLine($"-------------- {input} ---------------");

                    var lyrOut = layer1.Compute(input, learn) as ComputeCycle;

                    var activeColumns = layer1.GetResult("sp") as int[];

                    activeColumnsLst[input].Add(activeColumns.ToList());

                    previousInputs.Add(input.ToString());
                    if (previousInputs.Count > maxPrevInputs + 1)
                    {
                        previousInputs.RemoveAt(0);
                    }

                    string key = GetKey(previousInputs, input);


                    List <Cell> actCells;

                    if (lyrOut.ActiveCells.Count == lyrOut.WinnerCells.Count)
                    {
                        actCells = lyrOut.ActiveCells;
                    }
                    else
                    {
                        actCells = lyrOut.WinnerCells;
                    }

                    cls.Learn(key, actCells.ToArray());

                    if (learn == false)
                    {
                        Debug.WriteLine($"Inference mode");
                    }

                    Debug.WriteLine($"Col  SDR: {Helpers.StringifyVector(lyrOut.ActivColumnIndicies)}");
                    Debug.WriteLine($"Cell SDR: {Helpers.StringifyVector(actCells.Select(c => c.Index).ToArray())}");

                    if (key == lastPredictedValue)
                    {
                        matches++;
                        Debug.WriteLine($"Match. Actual value: {key} - Predicted value: {lastPredictedValue}");
                    }
                    else
                    {
                        Debug.WriteLine($"Missmatch! Actual value: {key} - Predicted value: {lastPredictedValue}");
                    }

                    if (lyrOut.PredictiveCells.Count > 0)
                    {
                        var predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());

                        Debug.WriteLine($"Current Input: {input} \t| Predicted Input: {predictedInputValue}");

                        lastPredictedValue = predictedInputValue;
                    }
                    else
                    {
                        Debug.WriteLine($"NO CELLS PREDICTED for next cycle.");
                        lastPredictedValue = string.Empty;
                    }
                }

                // The brain does not work this way, so we don't use it.
                // tm1.reset(mem);

                double accuracy = matches / (double)inputs.Length * 100.0;

                Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {inputs.Length}\t {accuracy}%");

                if (accuracy == 100.0)
                {
                    maxMatchCnt++;
                    Debug.WriteLine($"100% accuracy reched {maxMatchCnt} times.");
                    if (maxMatchCnt >= 30)
                    {
                        sw.Stop();
                        Debug.WriteLine($"Exit experiment in the stable state after 30 repeats with 100% of accuracy. Elapsed time: {sw.ElapsedMilliseconds / 1000 / 60} min.");
                        learn = false;
                        //var testInputs = new double[] { 0.0, 2.0, 3.0, 4.0, 5.0, 6.0, 5.0, 4.0, 3.0, 7.0, 1.0, 9.0, 12.0, 11.0, 0.0, 1.0 };

                        // C-0, D-1, E-2, F-3, G-4, H-5
                        //var testInputs = new double[] { 0.0, 0.0, 4.0, 4.0, 5.0, 5.0, 4.0, 3.0, 3.0, 2.0, 2.0, 1.0, 1.0, 0.0 };

                        //// Traverse the sequence and check prediction.
                        //foreach (var input in inputValues)
                        //{
                        //    var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
                        //    predictedInputValue = cls.GetPredictedInputValue(lyrOut.predictiveCells.ToArray());
                        //    Debug.WriteLine($"I={input} - P={predictedInputValue}");
                        //}

                        /*
                         * //
                         * // Here we let the HTM predict sequence five times on its own.
                         * // We start with last predicted value.
                         * int cnt = 5 * inputValues.Count;
                         *
                         * Debug.WriteLine("---- Start Predicting the Sequence -----");
                         *
                         * //
                         * // This code snippet starts with some input value and tries to predict all next inputs
                         * // as they have been learned as a sequence.
                         * // We take a random value to start somwhere in the sequence.
                         * var predictedInputValue = inputValues[new Random().Next(0, inputValues.Count - 1)].ToString();
                         *
                         * List<string> predictedValues = new List<string>();
                         *
                         * while (--cnt > 0)
                         * {
                         *  //var lyrOut = layer1.Compute(predictedInputValue, learn) as ComputeCycle;
                         *  var lyrOut = layer1.Compute(double.Parse(predictedInputValue[predictedInputValue.Length - 1].ToString()), false) as ComputeCycle;
                         *  predictedInputValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());
                         *  predictedValues.Add(predictedInputValue);
                         * };
                         *
                         * // Now we have a sequence of elements and watch in the trace if it matches to defined input set.
                         * foreach (var item in predictedValues)
                         * {
                         *  Debug.Write(item);
                         *  Debug.Write(" ,");
                         * }*/
                        break;
                    }
                }
                else if (maxMatchCnt > 0)
                {
                    Debug.WriteLine($"At 100% accuracy after {maxMatchCnt} repeats we get a drop of accuracy with {accuracy}. This indicates instable state. Learning will be continued.");
                    maxMatchCnt = 0;
                }
            }

            Debug.WriteLine("---- cell state trace ----");

            cls.TraceState($"cellState_MinPctOverlDuty-{p[KEY.MIN_PCT_OVERLAP_DUTY_CYCLES]}_MaxBoost-{p[KEY.MAX_BOOST]}.csv");

            Debug.WriteLine("---- Spatial Pooler column state  ----");

            foreach (var input in activeColumnsLst)
            {
                using (StreamWriter colSw = new StreamWriter($"ColumState_MinPctOverlDuty-{p[KEY.MIN_PCT_OVERLAP_DUTY_CYCLES]}_MaxBoost-{p[KEY.MAX_BOOST]}_input-{input.Key}.csv"))
                {
                    Debug.WriteLine($"------------ {input.Key} ------------");

                    foreach (var actCols in input.Value)
                    {
                        Debug.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
                        colSw.WriteLine(Helpers.StringifyVector(actCols.ToArray()));
                    }
                }
            }

            Debug.WriteLine("------------ END ------------");
        }
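RunExperiment above uses a GetKey helper that is not included in the snippet. Since previousInputs already contains the current input when GetKey is called, a plausible sketch simply joins the sliding window into a single sequence key (illustrative, not the original implementation).

        // Illustrative sketch: builds a sequence key such as "-1.0-0-1-2" from the sliding window of
        // previous inputs; the current input is already the last element of prevInputs.
        private static string GetKey(List<string> prevInputs, double input)
        {
            string key = string.Empty;

            for (int i = 0; i < prevInputs.Count; i++)
            {
                if (i > 0)
                {
                    key += "-";
                }

                key += prevInputs[i];
            }

            return key;
        }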
        [DataRow(81, 450, 1000)] //Sparsity - 0.18
        //[DataRow(9, 90, 1000)] //Sparsity - 0.10
        //[DataRow(9, 70, 1000)] //Sparsity - 0.128
        //[DataRow(9, 60, 1000)] //Sparsity - 0.15
        //[DataRow(9, 45, 1000)] //Sparsity - 0.20
        //[DataRow(9, 40, 1000)] //Sparsity - 0.225
        //[DataRow(9, 36, 1000)] //Sparsity - 0.25
        //[DataRow(9, 30, 1000)] //Sparsity - 0.30

        public void HtmSparsity(int W, int InputB, int loop)
        {
            string filename = "Sparsity_" + W + "." + InputB + ".csv";

            using (StreamWriter writer = new StreamWriter(filename))
            {
                Debug.WriteLine($"Learning Cycles: {460}");
                Debug.WriteLine("Cycle;Similarity");

                //Parent loop / Outer loop
                //This loop defines the number of times the experiment will run for the given data
                for (int j = 0; j < loop; j++)
                {
                    int        inputBits = InputB;
                    bool       learn     = true;
                    Parameters p         = Parameters.getAllDefaultParameters();
                    p.Set(KEY.RANDOM, new ThreadSafeRandom(42));
                    p.Set(KEY.INPUT_DIMENSIONS, new int[] { inputBits });
                    p.Set(KEY.CELLS_PER_COLUMN, 5);
                    p.Set(KEY.COLUMN_DIMENSIONS, new int[] { 500 });

                    CortexNetwork       net     = new CortexNetwork("my cortex");
                    List <CortexRegion> regions = new List <CortexRegion>();
                    CortexRegion        region0 = new CortexRegion("1st Region");

                    regions.Add(region0);

                    SpatialPoolerMT sp1 = new SpatialPoolerMT();
                    TemporalMemory  tm1 = new TemporalMemory();
                    var             mem = new Connections();
                    p.apply(mem);
                    sp1.Init(mem, UnitTestHelpers.GetMemory());
                    tm1.Init(mem);

                    Dictionary <string, object> settings = new Dictionary <string, object>()
                    {
                        { "W", W },
                        { "N", inputBits },
                        { "Radius", -1.0 },
                        { "MinVal", 0.0 },
                        // { "MaxVal", 20.0 },
                        { "Periodic", false },
                        { "Name", "scalar" },
                        { "ClipInput", false },
                    };

                    double max = 10;

                    List <double> lst = new List <double>();

                    for (double i = max - 1; i >= 0; i--)
                    {
                        lst.Add(i);
                    }

                    settings["MaxVal"] = max;

                    EncoderBase encoder = new ScalarEncoder(settings);

                    CortexLayer <object, object> layer1 = new CortexLayer <object, object>("L1");
                    //
                    // NewBorn learning stage.
                    region0.AddLayer(layer1);
                    layer1.HtmModules.Add("encoder", encoder);
                    layer1.HtmModules.Add("sp", sp1);

                    HtmClassifier <double, ComputeCycle> cls = new HtmClassifier <double, ComputeCycle>();

                    double[] inputs = lst.ToArray();

                    //
                    // This trains SP.
                    foreach (var input in inputs)
                    {
                        Debug.WriteLine($" ** {input} **");
                        for (int i = 0; i < 3; i++)
                        {
                            var lyrOut = layer1.Compute((object)input, learn) as ComputeCycle;
                        }
                    }

                    // Here we add TM module to the layer.
                    layer1.HtmModules.Add("tm", tm1);

                    int cycle   = 0;
                    int matches = 0;

                    double lastPredictedValue = 0;
                    //
                    // Now, training with SP+TM. SP is pretrained on pattern.
                    //Child loop / Inner loop

                    for (int i = 0; i < 460; i++)
                    {
                        matches = 0;
                        cycle++;
                        foreach (var input in inputs)
                        {
                            var lyrOut = layer1.Compute(input, learn) as ComputeCycle;

                            cls.Learn(input, lyrOut.ActiveCells.ToArray());

                            Debug.WriteLine($"-------------- {input} ---------------");

                            if (learn == false)
                            {
                                Debug.WriteLine($"Inference mode");
                            }

                            Debug.WriteLine($"W: {Helpers.StringifyVector(lyrOut.WinnerCells.Select(c => c.Index).ToArray())}");
                            Debug.WriteLine($"P: {Helpers.StringifyVector(lyrOut.PredictiveCells.Select(c => c.Index).ToArray())}");

                            var predictedValue = cls.GetPredictedInputValue(lyrOut.PredictiveCells.ToArray());

                            Debug.WriteLine($"Current Input: {input} \t| - Predicted value in previous cycle: {lastPredictedValue} \t| Predicted Input for the next cycle: {predictedValue}");

                            if (input == lastPredictedValue)
                            {
                                matches++;
                                Debug.WriteLine($"Match {input}");
                            }
                            else
                            {
                                Debug.WriteLine($"Missmatch Actual value: {input} - Predicted value: {lastPredictedValue}");
                            }

                            lastPredictedValue = predictedValue;
                        }

                        if (i == 500)
                        {
                            Debug.WriteLine("Stop Learning From Here. Entering inference mode.");
                            learn = false;
                        }

                        //tm1.reset(mem);

                        Debug.WriteLine($"Cycle: {cycle}\tMatches={matches} of {inputs.Length}\t {(double)matches / (double)inputs.Length * 100.0}%");
                        if ((double)matches / (double)inputs.Length == 1)
                        {
                            writer.WriteLine($"{cycle}");
                            break;
                        }
                    }
                }
                Debug.WriteLine("New Iteration");
            }
            //cls.TraceState();
            Debug.WriteLine("------------------------------------------------------------------------\n----------------------------------------------------------------------------");
        }
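The sparsity values in the DataRow comments above are the ratio of active encoder bits to total input bits, W / InputB; for example 81 / 450 = 0.18 and 9 / 45 = 0.20. A one-line helper (illustrative only) makes that relationship explicit.

        // Encoder sparsity: the fraction of input bits that are active for a single encoded value.
        private static double EncoderSparsity(int W, int inputBits) => (double)W / inputBits;
        // EncoderSparsity(81, 450) == 0.18, matching the first DataRow above.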
        public void RunGaussianNoiseExperiment()
        {
            const int E_outBits     = 423;
            const int columnsNumber = 2048;

            int[]        SP_Result           = null;
            int[]        SP_NoisyResult      = null;
            List <int[]> SP_Result_List      = new List <int[]>();
            List <int[]> SP_NoisyResult_List = new List <int[]>();

            double[] ham_total = { 0, 0, 0, 0, 0, 0, 0, 0, 0 };
            double[] ham_avg   = { 0, 0, 0, 0, 0, 0, 0, 0, 0 };
            double   ham_upper;
            double   ham_lower;
            int      number_training_set = 41;  // Integer numbers range from -20 to 20 with step of 1.
            int      number_testing_set  = 401; // Decimal numbers range from -20 to 20 with step of 0.1.

            string experimentFolder = nameof(GoussianNoiseExperiment);

            // string SP_noisyinFolder = nameof(GoussianNoiseExperiment);
            Directory.CreateDirectory(experimentFolder);
            // Directory.CreateDirectory(SP_noisyinFolder);
            string SP_inFile      = $"{experimentFolder}\\MyEncoderOut.csv";
            string SP_noisyinFile = $"{experimentFolder}\\MyNoisyEncoderOut.csv";
            //string SP_outFolder = "MySPOutput";
            //string SP_noisyoutFolder = "MyNoisySPOutput";
            //Directory.CreateDirectory(SP_outFolder);
            //Directory.CreateDirectory(SP_noisyoutFolder);
            string SP_outFile      = $"{experimentFolder}\\MySPOut.csv";
            string SP_noisyoutFile = $"{experimentFolder}\\MyNoisySPOut.csv";

            //string testFolder = "MyDraftFoler";
            //Directory.CreateDirectory(testFolder);

            //-------------------------------------------------------
            //|                     PARAMETERS                      |
            //-------------------------------------------------------
            Parameters p = Parameters.getAllDefaultParameters();

            p.Set(KEY.RANDOM, new ThreadSafeRandom(42));

            //------------------SPATIAL POOLER PARAMETERS-----------------
            p.Set(KEY.INPUT_DIMENSIONS, new int[] { E_outBits });
            p.Set(KEY.POTENTIAL_RADIUS, -1);
            p.Set(KEY.POTENTIAL_PCT, 0.75);
            p.Set(KEY.GLOBAL_INHIBITION, true);
            p.Set(KEY.INHIBITION_RADIUS, 15);
            p.Set(KEY.LOCAL_AREA_DENSITY, -1.0);
            p.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.02 * columnsNumber);
            p.Set(KEY.STIMULUS_THRESHOLD, 5);
            p.Set(KEY.SYN_PERM_INACTIVE_DEC, 0.008);
            p.Set(KEY.SYN_PERM_ACTIVE_INC, 0.05);
            p.Set(KEY.SYN_PERM_CONNECTED, 0.10);
            p.Set(KEY.SYN_PERM_BELOW_STIMULUS_INC, 0.01);
            p.Set(KEY.SYN_PERM_TRIM_THRESHOLD, 0.05);

            p.Set(KEY.MIN_PCT_OVERLAP_DUTY_CYCLES, 1);
            p.Set(KEY.MIN_PCT_ACTIVE_DUTY_CYCLES, 0.001);
            p.Set(KEY.DUTY_CYCLE_PERIOD, 100);

            // These values activate powerful boosting.
            p.Set(KEY.MAX_BOOST, 5);
            p.Set(KEY.DUTY_CYCLE_PERIOD, 100);
            p.Set(KEY.MIN_PCT_OVERLAP_DUTY_CYCLES, 1);

            p.Set(KEY.MAX_BOOST, 10);
            p.Set(KEY.WRAP_AROUND, true);
            p.Set(KEY.LEARN, true);

            //-------------------TEMPORAL MEMORY PARAMETERS----------------
            p.Set(KEY.COLUMN_DIMENSIONS, new int[] { columnsNumber });
            p.Set(KEY.CELLS_PER_COLUMN, 32);
            p.Set(KEY.ACTIVATION_THRESHOLD, 10);
            p.Set(KEY.LEARNING_RADIUS, 10);
            p.Set(KEY.MIN_THRESHOLD, 9);
            p.Set(KEY.MAX_NEW_SYNAPSE_COUNT, 20);
            p.Set(KEY.MAX_SYNAPSES_PER_SEGMENT, 225);
            p.Set(KEY.MAX_SEGMENTS_PER_CELL, 225);
            p.Set(KEY.INITIAL_PERMANENCE, 0.21);
            p.Set(KEY.CONNECTED_PERMANENCE, 0.1);
            p.Set(KEY.PERMANENCE_INCREMENT, 0.10);
            p.Set(KEY.PERMANENCE_DECREMENT, 0.10);
            p.Set(KEY.PREDICTED_SEGMENT_DECREMENT, 0.1);
            p.Set(KEY.LEARN, true);

            //Initializing components of a Cortex Layer
            SpatialPoolerMT sp1 = new SpatialPoolerMT();
            TemporalMemory  tm1 = new TemporalMemory();
            var             mem = new Connections();

            p.apply(mem);
            sp1.Init(mem, UnitTestHelpers.GetMemory());
            tm1.Init(mem);

            HtmClassifier <double, ComputeCycle> cls = new HtmClassifier <double, ComputeCycle>();

            Encoding(E_outBits);

            // Can adjust the number of SP learning cycles below
            for (int cycle = 0; cycle < 320; cycle++)
            {
                if (cycle >= 300)
                {
                    // This activates the new-born effect, which switches off the boosting.
                    //mem.setMaxBoost(0.0);
                    mem.HtmConfig.MaxBoost = 0.0;
                    //mem.updateMinPctOverlapDutyCycles(0.0);
                    mem.HtmConfig.MinPctOverlapDutyCycles = 0.0;
                }

                using (StreamReader sr = new StreamReader(SP_inFile))
                {
                    string line;
                    while ((line = sr.ReadLine()) != null)
                    {
                        string[] tokens   = line.Split(",");
                        int[]    SP_input = new int[E_outBits];
                        for (int i = 0; i < E_outBits; i++)
                        {
                            if (tokens[i + 1] == "0")
                            {
                                SP_input[i] = 0;
                            }
                            else
                            {
                                SP_input[i] = 1;
                            }
                        }
                        SP_Result = sp1.Compute(SP_input, true);
                    }
                }
            }

            using (StreamReader sr = new StreamReader(SP_inFile))
            {
                string line;
                int    lineNumber = 0;
                while ((line = sr.ReadLine()) != null)
                {
                    string[] tokens   = line.Split(",");
                    int[]    SP_input = new int[E_outBits];
                    for (int i = 0; i < E_outBits; i++)
                    {
                        if (tokens[i + 1] == "0")
                        {
                            SP_input[i] = 0;
                        }
                        else
                        {
                            SP_input[i] = 1;
                        }
                    }
                    SP_Result = sp1.Compute(SP_input, false, false);
                    SP_Result_List.Add(SP_Result);
                    int[,] SP_twoDimenArray = ArrayUtils.Make2DArray <int>(SP_Result, 32, 64);
                    var SP_twoDimArray = ArrayUtils.Transpose(SP_twoDimenArray);
                    NeoCortexUtils.DrawBitmap(SP_twoDimArray, 1024, 1024, $"{experimentFolder}\\{lineNumber}.png", Color.DimGray, Color.LawnGreen, text: tokens[0]);
                    lineNumber++;
                }
            }

            using (StreamReader sr = new StreamReader(SP_noisyinFile))
            {
                string line;
                int    lineNumber = 0;
                while ((line = sr.ReadLine()) != null)
                {
                    string[] tokens   = line.Split(",");
                    int[]    SP_input = new int[E_outBits];
                    for (int i = 0; i < E_outBits; i++)
                    {
                        if (tokens[i + 1] == "0")
                        {
                            SP_input[i] = 0;
                        }
                        else
                        {
                            SP_input[i] = 1;
                        }
                    }
                    SP_NoisyResult = sp1.Compute(SP_input, false, false);
                    SP_NoisyResult_List.Add(SP_NoisyResult);
                    var ham = MathHelpers.GetHammingDistance(SP_Result_List[lineNumber], SP_NoisyResult_List[lineNumber], true);
                    Debug.WriteLine($"Noisy input: {tokens[0]} - Hamming NonZ: {ham}");
                    ham = MathHelpers.GetHammingDistance(SP_Result_List[lineNumber], SP_NoisyResult_List[lineNumber], false);
                    Debug.WriteLine($"Noisy input: {tokens[0]} - Hamming All: {ham}");
                    int[,] SP_twoDimenArray = ArrayUtils.Make2DArray <int>(SP_NoisyResult, 32, 64);
                    var SP_twoDimArray = ArrayUtils.Transpose(SP_twoDimenArray);
                    NeoCortexUtils.DrawBitmap(SP_twoDimArray, 1024, 1024, $"{experimentFolder}\\{lineNumber}.png", Color.DimGray, Color.LawnGreen, text: tokens[0]);
                    lineNumber++;
                }
            }

            for (int i = 0; i < number_testing_set - 1; i += 10)
            {
                int count = 1;
                for (int j = i + 1; j < i + 1 + 9; j++)
                {
                    if (i != 0 && i != number_testing_set - 1)
                    {
                        ham_upper             = MathHelpers.GetHammingDistance(SP_NoisyResult_List[i], SP_NoisyResult_List[j], true);
                        ham_lower             = MathHelpers.GetHammingDistance(SP_NoisyResult_List[i], SP_NoisyResult_List[i - count], true);
                        ham_total[count - 1] += ham_upper + ham_lower;
                        count++;
                    }
                    else if (i == 0)
                    {
                        ham_upper             = MathHelpers.GetHammingDistance(SP_NoisyResult_List[i], SP_NoisyResult_List[j], true);
                        ham_total[count - 1] += ham_upper;
                        count++;
                    }
                    else
                    {
                        ham_lower             = MathHelpers.GetHammingDistance(SP_NoisyResult_List[i], SP_NoisyResult_List[i - count], true);
                        ham_total[count - 1] += ham_lower;
                        count++;
                    }
                }
            }
            for (int i = 0; i < 9; i++)
            {
                ham_avg[i] = (ham_total[i] / (number_training_set * 2 - 2));
                Debug.WriteLine($"0.{i + 1} step avg hamming distance: {ham_avg[i]}");
            }
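            // Illustrative addition, not part of the original test: SP_outFile is declared above but
            // never written; the averaged Hamming distances could be persisted to it like this.
            using (StreamWriter resultWriter = new StreamWriter(SP_outFile))
            {
                resultWriter.WriteLine("Step;AvgHammingDistance");
                for (int i = 0; i < ham_avg.Length; i++)
                {
                    resultWriter.WriteLine($"0.{i + 1};{ham_avg[i]}");
                }
            }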
        }