public void SerializationTest()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 2048);
    parameters.setGlobalInhibition(true);

    var sp = new SpatialPooler();

    var mem1 = new Connections();
    parameters.apply(mem1);

    var settings = new JsonSerializerSettings { ContractResolver = new ContractResolver(), Formatting = Formatting.Indented };

    // Round-trip the Connections instance through JSON.
    var jsonMem = JsonConvert.SerializeObject(mem1, settings);
    var mem2 = JsonConvert.DeserializeObject<Connections>(jsonMem, settings);

    sp.init(mem1);

    // Round-trip the SpatialPooler instance through JSON.
    var jsonSp = JsonConvert.SerializeObject(sp, settings);
    var sp2 = JsonConvert.DeserializeObject<SpatialPooler>(jsonSp, settings);

    #region Binary Serialization DOES NOT WORK
    /*
    MemoryStream ms = new MemoryStream();

    // Construct a BinaryFormatter and use it to serialize the data to the stream.
    BinaryFormatter formatter = new BinaryFormatter();
    try
    {
        Random x = new Random(1);
        formatter.Serialize(ms, x);
    }
    catch (SerializationException e)
    {
        Console.WriteLine("Failed to serialize. Reason: " + e.Message);
        throw;
    }

    ms.Seek(0, SeekOrigin.Begin);

    Connections mem2 = (Connections)formatter.Deserialize(ms);
    */
    #endregion
}
internal static void InitPooler(PoolerMode poolerMode, SpatialPooler sp, Connections mem, Parameters parameters = null)
{
    if (poolerMode == PoolerMode.Multinode)
    {
        sp.init(mem, UnitTestHelpers.GetMemory(mem.HtmConfig));
    }
    else if (poolerMode == PoolerMode.Multicore)
    {
        sp.init(mem, UnitTestHelpers.GetMemory());
    }
    else
    {
        sp.init(mem);
    }
}
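// A minimal usage sketch for InitPooler. Any mode other than PoolerMode.Multinode or
// PoolerMode.Multicore falls through to the single-threaded sp.init(mem) branch; the
// mode name used below is hypothetical and may not match the actual enum member:
//
//   var parameters = GetDefaultParams();
//   var mem = new Connections();
//   parameters.apply(mem);
//   var sp = new SpatialPooler();
//   InitPooler(PoolerMode.SingleThreaded, sp, mem, parameters);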
public void StableOutputOnSameInputTest()
{
    var parameters = GetDefaultParams();
    parameters.Set(KEY.POTENTIAL_RADIUS, 64 * 64);
    parameters.Set(KEY.POTENTIAL_PCT, 1.0);
    parameters.Set(KEY.GLOBAL_INHIBITION, false);
    parameters.Set(KEY.STIMULUS_THRESHOLD, 0.5);
    // Note: (int)(0.25 * 64 * 64); without the parentheses the cast binds to 0.25
    // alone and the whole expression evaluates to 0.
    parameters.Set(KEY.INHIBITION_RADIUS, (int)(0.25 * 64 * 64));
    parameters.Set(KEY.LOCAL_AREA_DENSITY, -1);
    parameters.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.1 * 64 * 64);
    parameters.Set(KEY.DUTY_CYCLE_PERIOD, 1000000);
    parameters.Set(KEY.MAX_BOOST, 5);

    parameters.setInputDimensions(new int[] { 32, 32 });
    parameters.setColumnDimensions(new int[] { 64, 64 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 64 * 64);

    var sp = new SpatialPooler();
    var mem = new Connections();

    // Build a 32x32 input with a small rectangle of active bits.
    int[] inputVector = new int[1024];
    for (int i = 0; i < 32; i++)
    {
        for (int j = 0; j < 32; j++)
        {
            inputVector[i * 32 + j] = (i > 2 && i < 5 && j > 2 && j < 8) ? 1 : 0;
        }
    }

    parameters.apply(mem);
    sp.init(mem);

    // The same input must produce the same SDR in every cycle.
    for (int i = 0; i < 10; i++)
    {
        var activeArray = sp.Compute(inputVector, true) as int[];
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str);
    }
}
public void CollSynapsesToInput()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 32 });
    parameters.setColumnDimensions(new int[] { 128 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 128);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[128];
    int[] inputVector = Helpers.GetRandomVector(32, parameters.Get<Random>(KEY.RANDOM));

    for (int i = 0; i < 100; i++)
    {
        sp.compute(inputVector, activeArray, true);

        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str);
    }
}
public HtmModuleNet(Parameters parameters, int[] levels)
{
    for (int levelIndx = 0; levelIndx < levels.Length; levelIndx++)
    {
        int levelIn;
        int levelOut;

        if (levelIndx == 0)
        {
            levelIn = levelOut = levels[levelIndx];
        }
        else
        {
            // Every subsequent level consumes the column output of the previous one.
            levelIn = connections[levelIndx - 1].getColumnDimensions()[0];
            levelOut = levels[levelIndx];
        }

        parameters.setInputDimensions(new int[] { levelIn, levelIn });
        parameters.setColumnDimensions(new int[] { levelOut, levelOut });
        parameters.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.1 * levelOut * levelOut);

        var mem = new Connections();
        parameters.apply(mem);

        this.activeArrays.Add(new int[levelOut * levelOut]);
        this.connections.Add(mem);

        SpatialPooler sp = new SpatialPooler();
        sp.init(mem, null);
        poolers.Add(sp);
    }
}
public void SPTutorialTest()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 2048);
    parameters.setGlobalInhibition(false);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[2048];
    int[] inputVector = Helpers.GetRandomVector(1000, parameters.Get<Random>(KEY.RANDOM));

    sp.compute(inputVector, activeArray, true);

    var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
    var str = Helpers.StringifyVector(activeCols);
    Debug.WriteLine(str);
}
public HtmModuleNet(Parameters[] parametersList)
{
    foreach (var prms in parametersList)
    {
        var mem = new Connections();
        prms.apply(mem);

        var colDims = prms.Get<int[]>(KEY.COLUMN_DIMENSIONS);
        int numCols = 1;
        for (int i = 0; i < colDims.Length; i++)
        {
            numCols *= colDims[i];
        }

        this.activeArrays.Add(new int[numCols]);
        this.connections.Add(mem);

        SpatialPooler sp = new SpatialPooler();
        sp.init(mem, null);
        poolers.Add(sp);
    }
}
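// A minimal usage sketch (hypothetical sizes) for the two constructors above. The
// first builds a hierarchy from level sizes, where each level consumes the column
// output of the previous one; the second builds one level per prepared Parameters set:
//
//   var net1 = new HtmModuleNet(GetDefaultParams(), new int[] { 32, 16, 8 });
//   var net2 = new HtmModuleNet(new Parameters[] { GetDefaultParams(), GetDefaultParams() });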
public void SerializationTest1()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 500 });
    parameters.setColumnDimensions(new int[] { 2048 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 2048);
    parameters.setGlobalInhibition(true);

    var sp1 = new SpatialPooler();
    var mem1 = new Connections();
    parameters.apply(mem1);
    sp1.init(mem1);

    sp1.Serializer("spTesting.json");
    string ser = File.ReadAllText("spTesting.json");

    var sp2 = SpatialPooler.Deserializer("spTesting.json");

    // sp2.Serializer("spTestingDeserialized");
    // string des = File.ReadAllText("spTestingDeserialized");
    // Assert.IsTrue(ser.SequenceEqual(des));

    /* The same round-trip can be done with Newtonsoft.Json:
    JsonSerializerSettings settings = new JsonSerializerSettings
    {
        DefaultValueHandling = DefaultValueHandling.Include,
        ObjectCreationHandling = ObjectCreationHandling.Auto,
        ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
        ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor,
        TypeNameHandling = TypeNameHandling.Auto
    };

    var jsonData = JsonConvert.SerializeObject(sp1, settings);
    File.WriteAllText("spSerializedFile.json", jsonData);

    var sp2 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText("spSerializedFile.json"), settings);
    var jsonData2 = JsonConvert.SerializeObject(sp2, settings);
    File.WriteAllText("spSerializedFile2.json", jsonData2);

    var sp3 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText("spSerializedFile2.json"), settings);
    var jsonData3 = JsonConvert.SerializeObject(sp3, settings);
    File.WriteAllText("spSerializedFile3.json", jsonData3);

    Assert.IsTrue(jsonData2.SequenceEqual(jsonData3));
    Assert.IsTrue(jsonData.SequenceEqual(jsonData2));
    */
}
public void StableOutputWithPersistence()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 32, 32 });
    parameters.setColumnDimensions(new int[] { 64, 64 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 64 * 64);

    var mem = new Connections();
    parameters.apply(mem);

    var sp = new SpatialPooler();
    sp.init(mem);

    int[] activeArray = new int[64 * 64];
    int[] inputVector = Helpers.GetRandomVector(32 * 32, parameters.Get<Random>(KEY.RANDOM));

    string str1 = String.Empty;

    for (int i = 0; i < 10; i++)
    {
        sp.compute(inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        str1 = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str1);
    }

    // Persist the trained SP as JSON and restore it.
    var settings = new JsonSerializerSettings { ContractResolver = new ContractResolver(), Formatting = Formatting.Indented };
    var jsonSp = JsonConvert.SerializeObject(sp, settings);
    var sp2 = JsonConvert.DeserializeObject<SpatialPooler>(jsonSp, settings);

    // The restored SP must produce the same stable SDR for the same input.
    activeArray = new int[activeArray.Length];
    for (int i = 10; i < 20; i++)
    {
        sp2.compute(inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str2 = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str2);
        Assert.IsTrue(str1.SequenceEqual(str2));
    }
}
public void SPInhibitionTest()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 10 });
    parameters.setColumnDimensions(new int[] { 1024 });
    parameters.setNumActiveColumnsPerInhArea(0.2 * 32 * 32);
    parameters.setGlobalInhibition(false);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] inputVector = new int[] { 1, 0, 0, 0, 1, 1, 1, 0, 1, 1 };
    int[] activeArray = new int[32 * 32];

    for (int i = 0; i < 10; i++)
    {
        var overlaps = sp.CalculateOverlap(mem, inputVector);
        var strOverlaps = Helpers.StringifyVector(overlaps);

        var inhibitions = sp.inhibitColumns(mem, ArrayUtils.toDoubleArray(overlaps));
        var strInhibitions = Helpers.StringifyVector(inhibitions);

        activeArray = sp.Compute(inputVector, true) as int[];

        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var strActiveArr = Helpers.StringifyVector(activeArray);
        Debug.WriteLine("Active array: " + strActiveArr);

        var strActiveCols = Helpers.StringifyVector(activeCols);
        Debug.WriteLine("Number of Active Columns: " + activeCols.Length);
        Debug.WriteLine($"{i} - {strActiveCols}");
    }

    var strOutput = Helpers.StringifyVector(activeArray);
    Debug.WriteLine("Output: " + strOutput);
}
public void TestMaxDims()
{
    var parameters = SpatialPoolerResearchTests.GetDefaultParams();
    parameters.Set(KEY.POTENTIAL_RADIUS, 64 * 64);
    parameters.Set(KEY.POTENTIAL_PCT, 1.0);
    parameters.Set(KEY.GLOBAL_INHIBITION, false);
    parameters.Set(KEY.STIMULUS_THRESHOLD, 0.5);
    // Note: (int)(0.25 * 64 * 64); without the parentheses the cast binds to 0.25
    // alone and the whole expression evaluates to 0.
    parameters.Set(KEY.INHIBITION_RADIUS, (int)(0.25 * 64 * 64));
    parameters.Set(KEY.LOCAL_AREA_DENSITY, -1);
    parameters.Set(KEY.NUM_ACTIVE_COLUMNS_PER_INH_AREA, 0.1 * 64 * 64);
    parameters.Set(KEY.DUTY_CYCLE_PERIOD, 1000000);
    parameters.Set(KEY.MAX_BOOST, 5);

    parameters.setInputDimensions(new int[] { 320, 320 });
    parameters.setColumnDimensions(new int[] { 2048, 2048 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 64 * 64);

    var sp = new SpatialPooler();
    var mem = new Connections();

    // Build a 32x32 input with a small rectangle of active bits.
    int[] inputVector = new int[1024];
    for (int i = 0; i < 32; i++)
    {
        for (int j = 0; j < 32; j++)
        {
            inputVector[i * 32 + j] = (i > 2 && i < 5 && j > 2 && j < 8) ? 1 : 0;
        }
    }

    parameters.apply(mem);
    sp.init(mem, null);
}
public void NeighborhoodTest()
{
    var parameters = GetDefaultParams();

    int cellsDim1 = 64;
    int cellsDim2 = 64;

    parameters.setInputDimensions(new int[] { 32 });
    parameters.setColumnDimensions(new int[] { cellsDim1 });

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    for (int rad = 1; rad < 10; rad++)
    {
        using (StreamWriter sw = new StreamWriter($"neighborhood-test-rad{rad}-center-from-{cellsDim1}-to-{0}.csv"))
        {
            sw.WriteLine($"{cellsDim1}|{cellsDim2}|{rad}|First column defines the center of the neighborhood. All other columns define the indices of the neighborhood columns");

            for (int center = 0; center < 64; center++)
            {
                var nbs = HtmCompute.GetNeighborhood(center, rad, mem.getColumnTopology().HtmTopology);

                StringBuilder sb = new StringBuilder();
                sb.Append(center);
                sb.Append('|');

                foreach (var neighborCellIndex in nbs)
                {
                    sb.Append(neighborCellIndex);
                    sb.Append('|');
                }

                string str = sb.ToString();
                sw.WriteLine(str.TrimEnd('|'));
            }
        }
    }
}
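// What the CSV rows above contain, assuming GetNeighborhood clips the neighborhood at
// the array edges for this one-dimensional topology of 64 columns: with radius 2,
// center 5 would produce the row "5|3|4|5|6|7", while center 0 would produce "0|0|1|2".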
public void TestMethod2()
{
    var parameters = Helpers.GetDefaultParams();

    var imageWidth = 60;
    var imageHeight = 60;
    int outputWidth = 120;
    int outputHeight = 120;
    String trainingImageName = "Lamp.png";
    String predictionImageNameShift = "Lamp75Shift.PNG";
    bool withTraining = true;

    StringBuilder reportFile = new StringBuilder();
    reportFile.AppendLine($"Given Image Width: {imageWidth}");
    reportFile.AppendLine($"Given Image Height: {imageHeight}");
    reportFile.AppendLine($"Given Output Column Width: {outputWidth}");
    reportFile.AppendLine($"Given Output Column Height: {outputHeight}");

    parameters.setInputDimensions(new int[] { imageWidth, imageHeight });
    parameters.setColumnDimensions(new int[] { outputWidth, outputHeight });
    parameters.setNumActiveColumnsPerInhArea(0.02 * outputWidth * outputHeight);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[outputWidth * outputHeight];
    int[] inputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
    int[] newActiveArray = new int[outputWidth * outputHeight];
    double[][] newActiveArrayDouble = new double[1][];
    newActiveArrayDouble[0] = new double[newActiveArray.Length];

    if (withTraining)
    {
        // Training: iterate until two consecutive cycles produce the same SDR.
        sp.compute(mem, inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

        double[][] oldActiveArrayDouble = new double[1][];
        oldActiveArrayDouble[0] = new double[activeArray.Length];
        for (int i = 0; i < activeArray.Length; i++)
        {
            oldActiveArrayDouble[0][i] = activeArray[i];
        }

        int isTrained = 0;
        while (isTrained == 0)
        {
            sp.compute(mem, inputVector, newActiveArray, true);
            activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
            for (int i = 0; i < newActiveArray.Length; i++)
            {
                newActiveArrayDouble[0][i] = newActiveArray[i];
            }

            if (GetHammingDistances(oldActiveArrayDouble, newActiveArrayDouble, true)[0] == 100)
            {
                isTrained = 1;
            }
            else
            {
                isTrained = 0;
                // Snapshot the values; assigning the reference would alias both
                // arrays and make the next comparison always succeed.
                oldActiveArrayDouble[0] = (double[])newActiveArrayDouble[0].Clone();
            }
        }

        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Trained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Trained Image({trainingImageName}): {activeCols.Length}");
    }
    else
    {
        // Without training.
        sp.compute(mem, inputVector, newActiveArray, false);
        var activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Untrained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Untrained Image({trainingImageName}): {activeCols.Length}");
    }

    // Prediction with shift (learning off).
    int[] inputVectorShift = ReadImageData(predictionImageNameShift, imageHeight, imageWidth);
    int[] activeArrayShift = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorShift, activeArrayShift, false);
    var resActiveColsShift = ArrayUtils.IndexWhere(activeArrayShift, (el) => el == 1);
    var resStrShift = Helpers.StringifyVector(resActiveColsShift);

    for (int i = 0; i < newActiveArray.Length; i++)
    {
        newActiveArrayDouble[0][i] = newActiveArray[i];
    }

    double[][] activeArrayShiftDouble = new double[1][];
    activeArrayShiftDouble[0] = new double[activeArrayShift.Length];
    for (int i = 0; i < activeArrayShift.Length; i++)
    {
        activeArrayShiftDouble[0][i] = activeArrayShift[i];
    }

    double hammingDistancePercentage = GetHammingDistances(newActiveArrayDouble, activeArrayShiftDouble, true)[0];

    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}):");
    reportFile.AppendLine(resStrShift);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}): {resActiveColsShift.Length}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {100 - hammingDistancePercentage}%");
    reportFile.AppendLine($"Output Overlap for Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): Output Overlap: {hammingDistancePercentage / 100}");

    using (StreamWriter writer = File.CreateText(Path.Combine(AppContext.BaseDirectory, $"Output/report-{trainingImageName}.txt")))
    {
        writer.Write(reportFile.ToString());
    }
}
public void TestMethod1()
{
    var parameters = Helpers.GetDefaultParams();

    var imageWidth = 30;
    var imageHeight = 30;
    int outputWidth = 60;
    int outputHeight = 60;
    String trainingImageName = "Lamp25Shift.PNG";
    String predictionImageNameShift = "Lamp25Shift.PNG";
    String predictionImageNameRotate = "LampRotate.PNG";
    String predictionImageNameDifferent = "Fish.PNG";
    bool withTraining = true;

    StringBuilder reportFile = new StringBuilder();
    reportFile.AppendLine($"Given Image Width: {imageWidth}");
    reportFile.AppendLine($"Given Image Height: {imageHeight}");
    reportFile.AppendLine($"Given Output Column Width: {outputWidth}");
    reportFile.AppendLine($"Given Output Column Height: {outputHeight}");

    parameters.setInputDimensions(new int[] { imageWidth, imageHeight });
    parameters.setColumnDimensions(new int[] { outputWidth, outputHeight });
    parameters.setNumActiveColumnsPerInhArea(0.02 * outputWidth * outputHeight);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[outputWidth * outputHeight];
    int[] inputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
    int[] newActiveArray = new int[outputWidth * outputHeight];

    if (withTraining)
    {
        // Training: iterate until two consecutive cycles produce the same SDR.
        sp.compute(mem, inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

        int[] oldActiveArray = activeArray;
        int flag = 0;
        while (flag == 0)
        {
            sp.compute(mem, inputVector, newActiveArray, true);
            activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);

            if (GetHammingDistance(oldActiveArray, newActiveArray) == 0)
            {
                flag = 1;
            }
            else
            {
                flag = 0;
                // Snapshot the values; assigning the reference would alias both
                // arrays and make the next comparison always succeed.
                oldActiveArray = (int[])newActiveArray.Clone();
            }
        }

        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Trained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Trained Image({trainingImageName}): {activeCols.Length}");
    }
    else
    {
        // Without training.
        sp.compute(mem, inputVector, newActiveArray, false);
        var activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Untrained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Untrained Image({trainingImageName}): {activeCols.Length}");
    }

    // Prediction with shift (learning off).
    int[] inputVectorShift = ReadImageData(predictionImageNameShift, imageHeight, imageWidth);
    int[] activeArrayShift = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorShift, activeArrayShift, false);
    var resActiveColsShift = ArrayUtils.IndexWhere(activeArrayShift, (el) => el == 1);
    var resStrShift = Helpers.StringifyVector(resActiveColsShift);
    int hamDistShift = GetHammingDistance(newActiveArray, activeArrayShift);
    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}):");
    reportFile.AppendLine(resStrShift);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}): {resActiveColsShift.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {hamDistShift}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {Math.Round((double)(hamDistShift * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with 90 degree rotation (learning off).
    int[] inputVectorR90 = ReadImageData(predictionImageNameRotate, imageHeight, imageWidth);
    int[] activeArrayR90 = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorR90, activeArrayR90, false);
    var resActiveColsR90 = ArrayUtils.IndexWhere(activeArrayR90, (el) => el == 1);
    var resStrR90 = Helpers.StringifyVector(resActiveColsR90);
    int hamDistR90 = GetHammingDistance(newActiveArray, activeArrayR90);
    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with 90 degrees rotate({predictionImageNameRotate}):");
    reportFile.AppendLine(resStrR90);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with 90 degrees rotate({predictionImageNameRotate}): {resActiveColsR90.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with 90 degrees rotate({predictionImageNameRotate}): {hamDistR90}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with 90 degrees rotate({predictionImageNameRotate}): {Math.Round((double)(hamDistR90 * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with a different image (learning off).
    int[] inputVectorDifferent = ReadImageData(predictionImageNameDifferent, imageHeight, imageWidth);
    int[] activeArrayDifferent = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorDifferent, activeArrayDifferent, false);
    var resActiveColsDifferent = ArrayUtils.IndexWhere(activeArrayDifferent, (el) => el == 1);
    var resStrDifferent = Helpers.StringifyVector(resActiveColsDifferent);
    int hamDistDifferent = GetHammingDistance(newActiveArray, activeArrayDifferent);
    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Different Image than Trained Image({predictionImageNameDifferent}):");
    reportFile.AppendLine(resStrDifferent);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {resActiveColsDifferent.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {hamDistDifferent}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {Math.Round((double)(hamDistDifferent * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with noise of the given percentages (learning off).
    double[] givenNoisePercentages = { 0, 1, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
    for (int i = 0; i < givenNoisePercentages.Length; i++)
    {
        int[] oldInputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
        int[] inputVectorNoisePercent = MakeSomeNoise(oldInputVector, givenNoisePercentages[i] / 100);
        int[] activeArrayNoisePercent = new int[outputWidth * outputHeight];
        MakeBinaryFile(inputVectorNoisePercent, imageHeight, imageWidth, $"{trainingImageName}-Noise{givenNoisePercentages[i]}p");
        sp.compute(mem, inputVectorNoisePercent, activeArrayNoisePercent, false);
        var resActiveColsPercent = ArrayUtils.IndexWhere(activeArrayNoisePercent, (el) => el == 1);
        var resStrPercent = Helpers.StringifyVector(resActiveColsPercent);
        int hamDistNoisePercent = GetHammingDistance(newActiveArray, activeArrayNoisePercent);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Prediction of Trained Image({trainingImageName}) with Noise of {givenNoisePercentages[i]} percent:");
        reportFile.AppendLine(resStrPercent);
        reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image({trainingImageName}) with Noise of {givenNoisePercentages[i]} percent: {resActiveColsPercent.Length}");
        reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: {hamDistNoisePercent}");
        reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: {Math.Round((double)(hamDistNoisePercent * 100) / (outputWidth * outputHeight), 4)}%");
        reportFile.AppendLine($"Output Overlap versus Input Overlap for Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: Output Overlap: {1 - Math.Round((double)(hamDistNoisePercent * 100) / (outputWidth * outputHeight), 4) / 100}, Input Overlap: {1 - givenNoisePercentages[i] / 100}");
    }

    using (StreamWriter writer = File.CreateText(Path.Combine(AppContext.BaseDirectory, $"Output/report-{trainingImageName}.txt")))
    {
        writer.Write(reportFile.ToString());
    }
}
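// MakeSomeNoise is a project helper used above. A minimal sketch of the assumed
// behavior (flip the given fraction of randomly chosen bits) follows; the name
// MakeSomeNoiseSketch and its internals are illustrative, not the actual implementation.
private static int[] MakeSomeNoiseSketch(int[] vector, double noiseFraction)
{
    var rnd = new Random(42);
    int[] noisy = (int[])vector.Clone();
    int bitsToFlip = (int)(vector.Length * noiseFraction);
    for (int i = 0; i < bitsToFlip; i++)
    {
        // The same index may be picked more than once; good enough for a sketch.
        int idx = rnd.Next(vector.Length);
        noisy[idx] = noisy[idx] == 1 ? 0 : 1;
    }
    return noisy;
}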
// [DataRow("MnistTestImages\\digit7.png", 128, 30)]
public void LearningimageDoubleShiftStble(string mnistImage, string shiftedImage, int[] imageSize, int[] topologies)
{
    var path = Path.Combine(Directory.GetParent(Environment.CurrentDirectory).Parent.Parent.FullName, mnistImage);
    var pathShifted = Path.Combine(Directory.GetParent(Environment.CurrentDirectory).Parent.Parent.FullName, shiftedImage);

    Console.WriteLine("Test started");
    Console.WriteLine(mnistImage);
    Console.WriteLine(shiftedImage);

    const int OutImgSize = 1024;

    // Extract the file name (without folder and extension) of both images.
    int index1 = mnistImage.IndexOf("\\") + 1;
    int index2 = mnistImage.IndexOf(".");
    string sub1 = mnistImage.Substring(0, index2);
    string sub2 = mnistImage.Substring(0, index1);
    string name = mnistImage.Substring(index1, sub1.Length - sub2.Length);

    int index1Shift = shiftedImage.IndexOf("\\") + 1;
    int index2Shift = shiftedImage.IndexOf(".");
    string sub1Shift = shiftedImage.Substring(0, index2Shift);
    string sub2Shift = shiftedImage.Substring(0, index1Shift);
    string nameShift = shiftedImage.Substring(index1Shift, sub1Shift.Length - sub2Shift.Length);

    for (int imSizeIndx = 0; imSizeIndx < imageSize.Length; imSizeIndx++)
    {
        Console.WriteLine(String.Format("Image Size: \t{0}", imageSize[imSizeIndx]));

        string testName = $"{name}_{imageSize[imSizeIndx]}";
        string outputSpeedFile = $"Output\\{testName}_speed.txt";
        string testNameShifted = $"{nameShift}_{imageSize[imSizeIndx]}";
        string outputSpeedFileShifted = $"Output\\{testNameShifted}_speed.txt";
        string inputBinaryImageFile = BinarizeImage(path, imageSize[imSizeIndx], testName);
        string inputBinaryImageShiftedFile = BinarizeImage(pathShifted, imageSize[imSizeIndx], testNameShifted);

        for (int topologyIndx = 0; topologyIndx < topologies.Length; topologyIndx++)
        {
            Console.WriteLine(String.Format("Topology: \t{0}", topologies[topologyIndx]));

            string finalName = $"{testName}_{topologies[topologyIndx]}";
            string outputHamDistFile = $"Output\\{finalName}_hamming.txt";
            string outputActColFile = $"Output\\{finalName}_activeCol.txt";
            string outputImage = $"Output\\{finalName}.png";

            string finalNameShifted = $"{testNameShifted}_{topologies[topologyIndx]}";
            string outputHamDistFileShifted = $"Output\\{finalNameShifted}_hamming.txt";
            string outputActColFileShifted = $"Output\\{finalNameShifted}_activeCol.txt";
            string outputImageShifted = $"Output\\{finalNameShifted}.png";

            int numOfActCols = 0;
            var sw = new Stopwatch();

            using (StreamWriter swHam = new StreamWriter(outputHamDistFile))
            using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
            using (StreamWriter swActCol = new StreamWriter(outputActColFile))
            {
                numOfActCols = topologies[topologyIndx] * topologies[topologyIndx];

                var parameters = GetDefaultParams();
                parameters.setInputDimensions(new int[] { imageSize[imSizeIndx], imageSize[imSizeIndx] });
                parameters.setColumnDimensions(new int[] { topologies[topologyIndx], topologies[topologyIndx] });
                parameters.setNumActiveColumnsPerInhArea(0.02 * numOfActCols);

                var sp = new SpatialPooler();
                var mem = new Connections();
                parameters.apply(mem);

                sw.Start();
                sp.init(mem);
                sw.Stop();
                swSpeed.WriteLine($"{topologies[topologyIndx]}|{(double)sw.ElapsedMilliseconds / (double)1000}");

                int actiColLen = numOfActCols;
                int[] activeArray = new int[actiColLen];

                // Read the binarized input image into an array.
                int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinaryImageFile).ToArray();

                var inputOverlap = new List<double>();
                var outputOverlap = new List<double>();

                int[] newActiveArray = new int[topologies[topologyIndx] * topologies[topologyIndx]];
                double[][] newActiveArrayDouble = new double[1][];
                newActiveArrayDouble[0] = new double[newActiveArray.Length];

                // Training: iterate until two consecutive cycles produce the same SDR.
                sp.compute(inputVector, activeArray, true);
                var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

                double[][] oldActiveArray = new double[1][];
                oldActiveArray[0] = new double[activeArray.Length];
                for (int a = 0; a < activeArray.Length; a++)
                {
                    oldActiveArray[0][a] = activeArray[a];
                }

                int isTrained = 0;
                while (isTrained == 0)
                {
                    sp.compute(inputVector, newActiveArray, true);
                    activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
                    for (int a = 0; a < newActiveArray.Length; a++)
                    {
                        newActiveArrayDouble[0][a] = newActiveArray[a];
                    }

                    if (MathHelpers.GetHammingDistance(oldActiveArray, newActiveArrayDouble, true)[0] == 100)
                    {
                        isTrained = 1;
                    }
                    else
                    {
                        isTrained = 0;
                        // Snapshot the values; assigning the reference would alias both
                        // arrays and end the loop one cycle too early.
                        oldActiveArray[0] = (double[])newActiveArrayDouble[0].Clone();
                    }
                }

                var str = Helpers.StringifyVector(activeCols);

                int[] oldInputVector = NeoCortexUtils.ReadCsvFileTest(inputBinaryImageFile).ToArray();
                int[] inputVectorShifted = NeoCortexUtils.ReadCsvFileTest(inputBinaryImageShiftedFile).ToArray();
                int[] activeArrayShifted = new int[topologies[topologyIndx] * topologies[topologyIndx]];
                double[][] activeArrayShiftedDouble = new double[1][];
                activeArrayShiftedDouble[0] = new double[activeArrayShifted.Length];

                sw.Restart();

                // Prediction on the shifted image (learning off).
                sp.compute(inputVectorShifted, activeArrayShifted, false);
                var resActiveColsPercent = ArrayUtils.IndexWhere(activeArrayShifted, (el) => el == 1);
                var resStrPercent = Helpers.StringifyVector(activeArrayShifted);

                for (int a = 0; a < activeArrayShifted.Length; a++)
                {
                    activeArrayShiftedDouble[0][a] = activeArrayShifted[a];
                }

                var distance = MathHelpers.GetHammingDistance(newActiveArrayDouble, activeArrayShiftedDouble, false)[0];
                var distPercent = ((100 - distance) * 100) / (topologies[topologyIndx] * topologies[topologyIndx]);
                swHam.WriteLine(distance + "\t" + (100 - distance) + "\t" + distPercent + "\t" + (1 - (distPercent / 100.0)) + "\n");
                outputOverlap.Add(1 - (distPercent / 100.0));

                swActCol.WriteLine(String.Format(@"Active Cols: {0}", Helpers.StringifyVector(resActiveColsPercent)));
                Console.WriteLine(resStrPercent);
                swActCol.WriteLine("Active Array: " + resStrPercent);

                sw.Stop();

                int[,] twoDimenArray = ArrayUtils.Make2DArray<int>(activeArrayShifted, topologies[topologyIndx], topologies[topologyIndx]);
                twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
                NeoCortexUtils.DrawBitmap(twoDimenArray, OutImgSize, OutImgSize, outputImage);

                swActCol.WriteLine("inputOverlaps: " + Helpers.StringifyVector(inputOverlap.ToArray()));
                swActCol.WriteLine("outputOverlaps: " + Helpers.StringifyVector(outputOverlap.ToArray()));
            }
        }
    }
}
//[DataRow("MnistTestImages\\digit7.png", 128, 30)]
public void CalculateSpeedOfLearningTest(string mnistImage, int[] imageSize, int[] topologies)
{
    // Extract the file name (without folder and extension).
    int index1 = mnistImage.IndexOf("\\") + 1;
    int index2 = mnistImage.IndexOf(".");
    string sub1 = mnistImage.Substring(0, index2);
    string sub2 = mnistImage.Substring(0, index1);
    string name = mnistImage.Substring(index1, sub1.Length - sub2.Length);

    for (int imSizeIndx = 0; imSizeIndx < imageSize.Length; imSizeIndx++)
    {
        string testName = $"{name}_{imageSize[imSizeIndx]}";
        string outputSpeedFile = $"Output\\{testName}_speed.txt";
        string inputBinaryImageFile = BinarizeImage("Output\\" + mnistImage, imageSize[imSizeIndx], testName);

        for (int topologyIndx = 0; topologyIndx < topologies.Length; topologyIndx++)
        {
            string finalName = $"{testName}_{topologies[topologyIndx]}";
            string outputHamDistFile = $"Output\\{finalName}_hamming.txt";
            string outputActColFile = $"Output\\{finalName}_activeCol.txt";
            string outputImage = $"Output\\{finalName}.png";

            int numOfActCols = 0;
            var sw = new Stopwatch();

            using (StreamWriter swHam = new StreamWriter(outputHamDistFile))
            using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
            using (StreamWriter swActCol = new StreamWriter(outputActColFile))
            {
                numOfActCols = topologies[topologyIndx] * topologies[topologyIndx];

                var parameters = GetDefaultParams();
                parameters.setInputDimensions(new int[] { imageSize[imSizeIndx], imageSize[imSizeIndx] });
                parameters.setColumnDimensions(new int[] { topologies[topologyIndx], topologies[topologyIndx] });
                parameters.setNumActiveColumnsPerInhArea(0.02 * numOfActCols);

                var sp = new SpatialPooler();
                var mem = new Connections();
                parameters.apply(mem);

                sw.Start();
                sp.init(mem);
                sw.Stop();
                swSpeed.WriteLine($"{topologies[topologyIndx]}|{(double)sw.ElapsedMilliseconds / (double)1000}");

                int actiColLen = numOfActCols;
                int[] activeArray = new int[actiColLen];

                // Read the binarized input image into an array.
                int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinaryImageFile).ToArray();

                sw.Restart();

                int iterations = 2;
                int[] oldArray = new int[activeArray.Length];
                for (int k = 0; k < iterations; k++)
                {
                    sp.compute(inputVector, activeArray, true);
                    var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
                    var distance = MathHelpers.GetHammingDistance(oldArray, activeArray);
                    swHam.WriteLine(distance + "\n");
                    var str = Helpers.StringifyVector(activeCols);
                    oldArray = new int[actiColLen];
                    activeArray.CopyTo(oldArray, 0);
                }

                var activeStr = Helpers.StringifyVector(activeArray);
                swActCol.WriteLine("Active Array: " + activeStr);

                sw.Stop();

                int[,] twoDimenArray = ArrayUtils.Make2DArray<int>(activeArray, topologies[topologyIndx], topologies[topologyIndx]);
                twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
                NeoCortexUtils.DrawBitmap(twoDimenArray, OutImgSize, OutImgSize, outputImage);
            }
        }
    }
}
public void TestMethod1()
{
    // Array initialization.
    int[] inputX1 = new int[1000];
    int[] inputX2 = new int[1000];
    int[] outputX1 = new int[2048];
    int[] outputX2 = new int[2048];

    // Fill inputX1 with random binary values. A single Random instance is used
    // instead of creating one per iteration, which could repeat values when
    // instances are seeded from the same clock tick.
    var random = new Random();
    for (int j = 0; j < 1000; j++)
    {
        inputX1[j] = random.Next(0, 2);
    }

    // Copy inputX1 to inputX2 to get two identical vectors.
    for (int i = 0; i < 1000; i++)
    {
        inputX2[i] = inputX1[i];
    }

    // Noise levels in percent.
    float[] Noise = new float[] { 0, 5, 10, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100 };

    // Arrays where the input and output overlap results will be stored.
    double[] percentOverlapArrayInput = new double[Noise.Length];
    double[] percentOverlapArrayOutput = new double[Noise.Length];

    // Initialization and configuration of the spatial pooler.
    var parameters = Helpers.GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    for (int j = 0; j < Noise.Length; j++)
    {
        // inputX1 stays the clean reference vector; inputX2 becomes a copy of it
        // with the given amount of noise added.
        inputX2 = corruptVector(ref inputX1, Noise[j]);

        // Feed inputX1 and inputX2 to the spatial pooler (learning off).
        sp.compute(mem, inputX1, outputX1, false);
        sp.compute(mem, inputX2, outputX2, false);

        // Compute the input and output overlaps and store the results.
        percentOverlapArrayInput[j] = percentOverlap(inputX1, inputX2);
        percentOverlapArrayOutput[j] = percentOverlap(outputX1, outputX2);
    }

    // Write the input and output overlap arrays to text files.
    WriteArrayToFile("OverlapInput.txt", percentOverlapArrayInput);
    WriteArrayToFile("OverlapOutput.txt", percentOverlapArrayOutput);

    // Launches the create-plots-Part2b script, which plots the data contained in the files.
    runPythonCode("OverlapInput.txt", "OverlapOutput.txt");
}
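// corruptVector and percentOverlap are helpers defined elsewhere in this test project.
// Minimal sketches of the assumed behavior follow (flip a given percentage of bits;
// report the shared ON bits relative to the reference vector). The names and internals
// are illustrative; note that the real corruptVector takes its input by ref.
private static int[] corruptVectorSketch(int[] vector, float noisePercent)
{
    var rnd = new Random(42);
    int[] corrupted = (int[])vector.Clone();
    int bitsToFlip = (int)(vector.Length * noisePercent / 100);
    for (int i = 0; i < bitsToFlip; i++)
    {
        // The same index may be picked more than once; good enough for a sketch.
        int idx = rnd.Next(vector.Length);
        corrupted[idx] = corrupted[idx] == 1 ? 0 : 1;
    }
    return corrupted;
}

private static double percentOverlapSketch(int[] v1, int[] v2)
{
    int shared = 0, onBits = 0;
    for (int i = 0; i < v1.Length; i++)
    {
        if (v1[i] == 1) onBits++;
        if (v1[i] == 1 && v2[i] == 1) shared++;
    }
    // Overlap in percent, relative to the ON bits of the reference vector.
    return onBits == 0 ? 0 : 100.0 * shared / onBits;
}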
public void CategorySequenceExperiment()
{
    bool learn = true;
    Parameters p = Parameters.getAllDefaultParameters();
    p.Set(KEY.RANDOM, new ThreadSafeRandom(42));
    p.Set(KEY.INPUT_DIMENSIONS, new int[] { 100 });
    p.Set(KEY.CELLS_PER_COLUMN, 30);

    string[] categories = new string[] { "A", "B", "C", "D" };
    //string[] categories = new string[] { "A", "B", "C", "D", "E", "F", "G", "H", "I", "K", "L", "M", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "Ö" };

    CortexNetwork net = new CortexNetwork("my cortex");
    List<CortexRegion> regions = new List<CortexRegion>();
    CortexRegion region0 = new CortexRegion("1st Region");
    regions.Add(region0);

    SpatialPooler sp1 = new SpatialPooler();
    TemporalMemory tm1 = new TemporalMemory();
    var mem = new Connections();
    p.apply(mem);
    sp1.init(mem, UnitTestHelpers.GetMemory());
    tm1.init(mem);

    Dictionary<string, object> settings = new Dictionary<string, object>();
    //settings.Add("W", 25);
    settings.Add("N", 100);
    //settings.Add("Radius", 1);

    EncoderBase encoder = new CategoryEncoder(categories, settings);

    CortexLayer<object, object> layer1 = new CortexLayer<object, object>("L1");
    region0.AddLayer(layer1);
    layer1.HtmModules.Add("encoder", encoder);
    layer1.HtmModules.Add("sp", sp1);

    HtmClassifier<string, ComputeCycle> cls = new HtmClassifier<string, ComputeCycle>();
    HtmUnionClassifier<string, ComputeCycle> cls1 = new HtmUnionClassifier<string, ComputeCycle>();

    string[] inputs = new string[] { "A", "B", "C", "D" };

    //
    // This trains the SP.
    foreach (var input in inputs)
    {
        Debug.WriteLine($" ** {input} **");
        for (int i = 0; i < 3; i++)
        {
            var lyrOut = layer1.Compute((object)input, learn) as ComputeCycle;
        }
    }

    // Persist the trained SP and replace it in the layer with the deserialized instance.
    sp1.Serializer("spCSTSerialized.json");
    var sp2 = SpatialPooler.Deserializer("spCSTSerialized.json");
    layer1.HtmModules.Remove("sp");
    layer1.HtmModules.Add("sp", sp2);

    // Here we add the TM module to the layer.
    layer1.HtmModules.Add("tm", tm1);

    //
    // Now train with SP+TM. The SP is pretrained on the pattern.
    for (int i = 0; i < 200; i++)
    {
        foreach (var input in inputs)
        {
            var lyrOut = layer1.Compute(input, learn) as ComputeCycle;
            //cls1.Learn(input, lyrOut.activeCells.ToArray(), learn);

            cls.Learn(input, lyrOut.ActiveCells.ToArray(), lyrOut.predictiveCells.ToArray());
            Debug.WriteLine($"Current Input: {input}");

            if (learn == false)
            {
                Debug.WriteLine($"Predict Input When Not Learn: {cls.GetPredictedInputValue(lyrOut.predictiveCells.ToArray())}");
            }
            else
            {
                Debug.WriteLine($"Predict Input: {cls.GetPredictedInputValue(lyrOut.predictiveCells.ToArray())}");
            }

            Debug.WriteLine("-----------------------------------------------------------\n----------------------------------------------------------");
        }

        if (i == 10)
        {
            Debug.WriteLine("Stop Learning From Here----------------------------");
            learn = false;
        }

        // tm1.reset(mem);
    }

    Debug.WriteLine("------------------------------------------------------------------------\n----------------------------------------------------------------------------");

    /*
    learn = false;
    for (int i = 0; i < 19; i++)
    {
        foreach (var input in inputs)
        {
            layer1.Compute((object)input, learn);
        }
    }
    */

    sp1.Serialize("tm.serialize.json");
}
public void TestMethod1()
{
    // Number of binary vectors that will be used to train the SP.
    int numExamples = 10;

    // Two-dimensional input vector; each row will be used as a one-dimensional vector.
    int[,] inputVectors = new int[numExamples, 1000];

    // Two-dimensional output binary vector.
    int[,] outputColumns = new int[numExamples, 2048];

    // Fill the input vectors with random binary values. A single Random instance is
    // used instead of creating one per iteration, which could repeat values when
    // instances are seeded from the same clock tick.
    var random = new Random();
    for (int i = 0; i < numExamples; i++)
    {
        for (int k = 0; k < 1000; k++)
        {
            inputVectors[i, k] = random.Next(0, 2);
        }
    }

    // Spatial pooler initialization and configuration.
    var parameters = Helpers.GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    // One-dimensional input vector initialized with binary values.
    int[] inputVector = Helpers.GetRandomVector(1000, parameters.Get<Random>(KEY.RANDOM));

    // Array that will be exposed to the SP to activate some of its columns.
    int[] activeCols = new int[2048];

    // List for the active column scores.
    List<int> activeColScores = new List<int>();

    // Learning is turned off.
    sp.compute(mem, inputVector, activeCols, false);

    // Overlap scores of the columns.
    var overlaps = sp.calculateOverlap(mem, inputVector);

    // Sort the overlaps in descending order, then write them to overlapBeforeTraining.txt.
    overlaps = reverseSort(overlaps);
    WriteIntArrayToFile("overlapBeforeTraining.txt", overlaps);

    // Collect the overlap score of each active column.
    for (int i = 0; i < 2048; i++)
    {
        if (activeCols[i] != 0)
        {
            activeColScores.Add(overlaps[i]);
        }
    }

    // Count the active columns; the count is written to a file for plotting purposes.
    int numberOfActivCols = activeColScores.Count;
    int[] numberOfActiveColumns = new int[] { numberOfActivCols };

    int[] inputVectorsRowk = new int[] { };
    int[] outputColumnsRowk = new int[] { };

    // Number of times the vectors are exposed to the SP.
    int epochs = 1;

    // The "numExamples" input binary vectors are exposed to the SP "epochs" times to train it.
    for (int i = 0; i < epochs; i++)
    {
        for (int k = 0; k < numExamples; k++)
        {
            inputVectorsRowk = GetRow(inputVectors, k);
            outputColumnsRowk = GetRow(outputColumns, k);
            sp.compute(mem, inputVectorsRowk, outputColumnsRowk, true);
        }
    }

    overlaps = sp.calculateOverlap(mem, inputVectorsRowk);
    overlaps = reverseSort(overlaps);
    WriteIntArrayToFile("overlapAfterTraining.txt", overlaps);
    WriteIntArrayToFile("numberOfActCols.txt", numberOfActiveColumns);

    // Overlap before and after training plus numberOfActCols are needed for the graphs.
    runPythonCode2("overlapBeforeTraining.txt", "overlapAfterTraining.txt", "numberOfActCols.txt");

    int[,] inputVectorsCorrupted = new int[numExamples, 1000];
    int[,] outputColumnsCorrupted = new int[numExamples, 2048];
    float[] Noise = new float[] { 0, 5, 10, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
    double[] percentOverlapInputs = new double[Noise.Length];
    double[] percentOverlapOutputs = new double[Noise.Length];

    int[] inputVectorsCorruptedRow0 = GetRow(inputVectorsCorrupted, 0);
    int[] inputVectorsRow0 = GetRow(inputVectors, 0);
    int[] outputColumnsRow0 = GetRow(outputColumns, 0);
    int[] outputColumnsCorruptedRow0 = GetRow(outputColumnsCorrupted, 0);

    resetVector(inputVectorsRow0, inputVectorsCorruptedRow0);

    for (int i = 0; i < Noise.Length; i++)
    {
        inputVectorsCorruptedRow0 = corruptVector(ref inputVectorsRow0, Noise[i]);
        sp.compute(mem, inputVectorsRow0, outputColumnsRow0, false);
        sp.compute(mem, inputVectorsCorruptedRow0, outputColumnsCorruptedRow0, false);
        percentOverlapInputs[i] = percentOverlap(inputVectorsRow0, inputVectorsCorruptedRow0);
        percentOverlapOutputs[i] = percentOverlap(outputColumnsRow0, outputColumnsCorruptedRow0);
    }

    WriteDoubleArrayToFile("percentOverlapInputs.txt", percentOverlapInputs);
    WriteDoubleArrayToFile("percentOverlapOutputs.txt", percentOverlapOutputs);
    runPythonCode1("percentOverlapInputs.txt", "percentOverlapOutputs.txt");
}
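// GetRow and resetVector are project helpers used above. Minimal sketches of the
// assumed behavior follow (copy one row out of a 2-D array; copy one vector over
// another); the names and internals are illustrative, not the actual implementations.
private static int[] GetRowSketch(int[,] matrix, int row)
{
    int cols = matrix.GetLength(1);
    int[] result = new int[cols];
    for (int c = 0; c < cols; c++)
    {
        // Copy each element of the requested row into a flat array.
        result[c] = matrix[row, c];
    }
    return result;
}

private static void resetVectorSketch(int[] source, int[] target)
{
    for (int i = 0; i < source.Length; i++)
    {
        // Overwrite the target vector with the source values.
        target[i] = source[i];
    }
}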
public void SerializationTestWithTrainedData()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 16 * 16 });
    parameters.setColumnDimensions(new int[] { 32 * 32 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 32 * 32);
    parameters.setMinPctOverlapDutyCycles(0.01);

    var mem = new Connections();
    parameters.apply(mem);

    var sp1 = new SpatialPooler();
    sp1.init(mem);

    int[] activeArray = new int[32 * 32];
    int[] inputVector = Helpers.GetRandomVector(16 * 16, parameters.Get<Random>(KEY.RANDOM));

    /* A fixed input pattern can be used instead of the random vector:
    int[] inputVector = {
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
        1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
        1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
        1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0 };
    */

    string str1 = String.Empty;

    for (int i = 0; i < 5; i++)
    {
        sp1.compute(inputVector, activeArray, true);
        var activeCols1 = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        str1 = Helpers.StringifyVector(activeCols1);
        Debug.WriteLine(str1);
    }

    // Serialize the trained SP, deserialize it, and verify that re-serializing
    // the deserialized instance produces identical JSON.
    sp1.Serializer("spTrain1.json");
    string ser1 = File.ReadAllText("spTrain1.json");

    var sp2 = SpatialPooler.Deserializer("spTrain1.json");
    sp2.Serializer("spTrainDes1.json");
    string des1 = File.ReadAllText("spTrainDes1.json");
    Assert.IsTrue(ser1.SequenceEqual(des1));

    // The deserialized SP must produce the same SDR for the same input.
    for (int i = 5; i < 10; i++)
    {
        sp2.compute(inputVector, activeArray, false);
        var activeCols2 = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str2 = Helpers.StringifyVector(activeCols2);
        Debug.WriteLine(str2);
        Assert.IsTrue(str1.SequenceEqual(str2));
    }

    sp2.Serializer("spTrain2.json");
    string ser2 = File.ReadAllText("spTrain2.json");
    // Assert.IsTrue(ser2.SequenceEqual(des1));

    /* The same round-trip can be done with Newtonsoft.Json:
    JsonSerializerSettings settings = new JsonSerializerSettings
    {
        DefaultValueHandling = DefaultValueHandling.Include,
        ObjectCreationHandling = ObjectCreationHandling.Auto,
        ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
        ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor,
        TypeNameHandling = TypeNameHandling.Auto
    };

    var jsConverted = JsonConvert.SerializeObject(sp1, Formatting.Indented, settings);
    string file2 = "spSerializeTrain-newtonsoft.json";
    File.WriteAllText(file2, jsConverted);

    SpatialPooler sp2 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText(file2), settings);

    string serializedSecondPooler = JsonConvert.SerializeObject(sp2, Formatting.Indented, settings);
    string fileSecondPooler = "spSerializeTrain-secondpooler-newtonsoft.json";
    File.WriteAllText(fileSecondPooler, serializedSecondPooler);

    SpatialPooler sp3 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText(fileSecondPooler), settings);
    string serializedThirdPooler = JsonConvert.SerializeObject(sp3, Formatting.Indented, settings);

    Assert.IsTrue(serializedThirdPooler.SequenceEqual(serializedSecondPooler), "Third and second poolers are not equal");
    Assert.IsTrue(jsConverted.SequenceEqual(serializedSecondPooler), "First and second poolers are not equal");
    */
}