public void CollSynapsesToInput()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 32 });
    parameters.setColumnDimensions(new int[] { 128 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 128);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.Init(mem);

    int[] activeArray = new int[128];
    int[] inputVector = Helpers.GetRandomVector(32, parameters.Get<Random>(KEY.RANDOM));

    for (int i = 0; i < 100; i++)
    {
        sp.compute(inputVector, activeArray, true);

        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str);
    }
}
public void SPTutorialTest()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 2048);
    parameters.setGlobalInhibition(false);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.Init(mem);

    int[] activeArray = new int[2048];
    int[] inputVector = Helpers.GetRandomVector(1000, parameters.Get<Random>(KEY.RANDOM));

    sp.compute(inputVector, activeArray, true);

    var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
    var str = Helpers.StringifyVector(activeCols);
    Debug.WriteLine(str);
}
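// GetDefaultParams() is a helper defined in the test base class and not shown in
// this file. Below is a minimal, hypothetical sketch of what such a helper might
// set, assuming NeoCortexApi's Parameters factory and using only KEY constants
// that appear elsewhere in this file. The concrete values are illustrative only
// and may differ from the real defaults.
private static Parameters GetDefaultParams()
{
    var parameters = Parameters.getAllDefaultParameters(); // assumed factory method
    parameters.Set(KEY.RANDOM, new Random(42));            // fixed seed for reproducible SDRs
    parameters.Set(KEY.GLOBAL_INHIBITION, true);
    parameters.Set(KEY.POTENTIAL_RADIUS, 16);
    parameters.Set(KEY.DUTY_CYCLE_PERIOD, 1000);
    parameters.Set(KEY.MAX_BOOST, 10.0);
    return parameters;
}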
public void StableOutputWithPersistence()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 32, 32 });
    parameters.setColumnDimensions(new int[] { 64, 64 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 64 * 64);

    var mem = new Connections();
    parameters.apply(mem);

    var sp = new SpatialPooler();
    sp.Init(mem);

    int[] activeArray = new int[64 * 64];
    int[] inputVector = Helpers.GetRandomVector(32 * 32, parameters.Get<Random>(KEY.RANDOM));

    string str1 = String.Empty;

    for (int i = 0; i < 2; i++)
    {
        sp.compute(inputVector, activeArray, true);

        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        str1 = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str1);
    }

    // Serialize the trained pooler to JSON and restore it into a new instance.
    var settings = new JsonSerializerSettings
    {
        ContractResolver = new ContractResolver(),
        Formatting = Formatting.Indented
    };

    var jsonSp = JsonConvert.SerializeObject(sp, settings);
    var sp2 = JsonConvert.DeserializeObject<SpatialPooler>(jsonSp, settings);

    activeArray = new int[activeArray.Length];

    // The restored pooler must produce the same SDR for the same input.
    for (int i = 10; i < 20; i++)
    {
        sp2.compute(inputVector, activeArray, true);

        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str2 = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str2);

        Assert.IsTrue(str1.SequenceEqual(str2));
    }
}
public void ExperimentTest(string inputBinarizedFile)
{
    var parameters = SetupParameters(32, 64, 4096, true);
    parameters.Set(KEY.DUTY_CYCLE_PERIOD, 100000);
    parameters.Set(KEY.MAX_BOOST, 1.0);
    parameters.Set(KEY.IS_BUMPUP_WEAKCOLUMNS_DISABLED, true);

    var sp = new SpatialPooler();
    var mem = new Connections();

    int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinarizedFile).ToArray();
    int[] activeArray = new int[4096];

    parameters.apply(mem);
    sp.Init(mem);

    for (int i = 0; i < 1000; i++)
    {
        sp.compute(inputVector, activeArray, true);

        var activeCols = activeArray.IndexWhere((el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        Debug.WriteLine(str);
    }
}
public void SchemaImageClassificationTest()
{
    var imagesFolder = "..\\..\\..\\TestFiles\\SchemaImageClassification\\image";
    var csvPath = "..\\..\\..\\TestFiles\\SchemaImageClassification\\csv";

    string[] files = Directory.GetFiles(imagesFolder, "*", SearchOption.AllDirectories);

    string path, name;
    (path, name) = GetPathAndName(files[0]);

    int activeColumn = 60;
    int inputDimension = 32;
    string[] imageNames = new string[files.Length];

    SpatialPooler sp = new SpatialPooler();
    Connections mem = new Connections();

    Parameters config = GetParam(inputDimension, activeColumn);
    config.apply(mem);
    sp.Init(mem);

    // Stores one stable active-column sequence (SDR) per image.
    List<int[]> activeArray = new List<int[]>();

    // For each image in this directory
    for (int i = 0; i < files.Length; i++)
    {
        // Buffer for the stable active-column sequence of image i.
        activeArray.Add(new int[activeColumn * activeColumn]);

        (path, name) = GetPathAndName(files[i]);
        imageNames[i] = name;

        string binaryImagePath = BinarizeImage(path, inputDimension, inputDimension, csvPath, name);

        // 1D binarized representation of the image.
        int[] inputVector = ReadCsvFileTest(binaryImagePath).ToArray();

        // Two alternating (ping-pong) buffers; the current output is compared
        // with the previous one.
        List<int[]> tempArr = new List<int[]>();
        tempArr.Add(new int[activeColumn * activeColumn]);
        tempArr.Add(new int[activeColumn * activeColumn]);

        int iter = -1;
        int id = 0;

        // Train the spatial pooler on a single image until the active-column
        // sequence becomes stable.
        while (true)
        {
            id = (++iter & 1) == 0 ? 0 : 1;

            for (int i_x = 0; i_x < tempArr[id].Length; i_x++)
            {
                tempArr[id][i_x] = 0;
            }

            sp.compute(inputVector, tempArr[id], true);

            // On the first pass there is no previous output to compare with.
            // (The original code skipped iteration 1 instead, which compared the
            // first output against an empty buffer.)
            if (iter == 0)
            {
                continue;
            }

            var d = GetHammingDistance(tempArr[id], tempArr[id ^ 1], false);
            if (d != double.NegativeInfinity)
            {
                Console.WriteLine(d);
            }

            /* Note: GetHammingDistance returns a value between 0 and 100; two
             * identical sequences yield 100. The comparison below uses a small
             * epsilon instead of an exact equality check to allow four digits
             * of precision. */
            if ((100.0 - d) < 0.00001)
            {
                for (int i_x = 0; i_x < tempArr[id].Length; i_x++)
                {
                    activeArray[i][i_x] = tempArr[id][i_x];
                }
                break;
            }
        }

        Console.WriteLine("Finished image: " + i);
    }

    int[] parent = new int[files.Length];
    bool[] hasFolder = new bool[files.Length];

    // Pairwise distance matrix. New double[] entries are already zero-initialized;
    // the original inner loop called Array.Append, which is a no-op on arrays.
    List<double[]> distance = new List<double[]>();
    for (int k = 0; k < files.Length; k++)
    {
        distance.Add(new double[files.Length]);
    }

    for (int k = 0; k < files.Length; k++)
    {
        parent[k] = k;
        hasFolder[k] = false;
    }

    for (int i = 0; i < activeArray.Count; i++)
    {
        for (int j = i; j < activeArray.Count; j++)
        {
            double d = Math.Min(
                GetHammingDistance(activeArray[i], activeArray[j], true),
                GetHammingDistance(activeArray[j], activeArray[i], true));
            //double d = GetHammingDistance(activeArray[i], activeArray[j], false);

            distance[i][j] = d;
            distance[j][i] = d;
        }
    }

    SaveOutput(activeArray, path, imageNames);
    Groupping(distance, imageNames, path);
    Report(distance, imageNames);
}
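// The stabilization loop above (compute into alternating buffers until two
// consecutive outputs are identical) recurs in several tests in this file. A
// minimal, hypothetical helper sketch extracting that pattern, assuming the
// sp.compute(int[], int[], bool) signature used above and a caller-supplied
// similarity function that returns 100.0 for identical sequences:
private static int[] ComputeUntilStable(SpatialPooler sp, int[] inputVector, int numColumns,
    Func<int[], int[], double> similarity)
{
    // Ping-pong buffer pair: even iterations write buffers[0], odd write buffers[1].
    int[][] buffers = { new int[numColumns], new int[numColumns] };

    for (int iter = 0; ; iter++)
    {
        int id = iter & 1;
        Array.Clear(buffers[id], 0, numColumns);
        sp.compute(inputVector, buffers[id], true);

        // Skip the comparison on the first pass; there is no previous output yet.
        if (iter > 0 && 100.0 - similarity(buffers[id], buffers[id ^ 1]) < 0.00001)
        {
            return buffers[id];
        }
    }
}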
/// <summary>
/// Trains the spatial pooler on the input image and writes the results to text files
/// in the folder @"/OutputDutyCycle". The result files contain a speed comparison
/// between global and local inhibition and the stability of the output array
/// (measured by comparing Hamming distances between iterations). Finally, the method
/// draws the active columns as a .png image.
/// This training method is used to test the training speed with different values of
/// max boost and duty cycle.
/// </summary>
/// <param name="inputBinarizedFile">Input image after binarization</param>
/// <param name="hammingFile">Path to the Hamming distance output file</param>
/// <param name="outputSpeedFile">Path to the speed comparison output file</param>
/// <param name="outputImage">Path to the active-column output file (.png image)</param>
/// <param name="parameters">Parameter setup</param>
private static void Training(string inputBinarizedFile, string hammingFile, string outputSpeedFile, string outputImage, Parameters parameters)
{
    int outputImageSize = 1024;
    int topology = parameters.Get<int[]>(KEY.COLUMN_DIMENSIONS)[0];
    int activeColumn = topology * topology;
    var stopwatch = new Stopwatch();

    using (StreamWriter swHamming = new StreamWriter(hammingFile))
    using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
    {
        var sp = new SpatialPooler();
        var mem = new Connections();
        parameters.apply(mem);

        // Measure initialization time.
        stopwatch.Start();
        sp.Init(mem);
        stopwatch.Stop();

        int[] activeArray = new int[activeColumn];

        // Read the binarized input image into an array.
        int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinarizedFile).ToArray();

        stopwatch.Restart();

        int iterations = 1000;
        int[] oldArray = new int[activeArray.Length];

        for (int k = 0; k < iterations; k++)
        {
            sp.compute(inputVector, activeArray, true);

            var activeCols = activeArray.IndexWhere((el) => el == 1);
            var distance = MathHelpers.GetHammingDistance(oldArray, activeArray);
            var similarity = MathHelpers.CalcArraySimilarity(oldArray, activeArray);
            swHamming.WriteLine($"{distance} | {similarity}");

            var str = Helpers.StringifyVector(activeCols);
            Debug.WriteLine(str);

            oldArray = new int[activeArray.Length];
            activeArray.CopyTo(oldArray, 0);
        }

        var activeArrayString = Helpers.StringifyVector(activeArray);
        stopwatch.Stop();
        Debug.WriteLine("Active Array: " + activeArrayString);

        int potentialRadius = parameters.Get<int>(KEY.POTENTIAL_RADIUS);
        bool isGlobalInhibition = parameters.Get<bool>(KEY.GLOBAL_INHIBITION);
        string inhibition = isGlobalInhibition ? "Global" : "Local";
        double milliseconds = stopwatch.ElapsedMilliseconds;
        double seconds = milliseconds / 1000;
        swSpeed.WriteLine($"Column dimension: {topology.ToString().PadRight(5)} | Potential Radius: {potentialRadius} | Inhibition type: {inhibition.PadRight(7)} | Total time: {milliseconds:N0} milliseconds ({seconds:N2} seconds).");

        int[,] twoDimenArray = ArrayUtils.Make2DArray(activeArray, topology, topology);
        twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
        NeoCortexUtils.DrawBitmap(twoDimenArray, outputImageSize, outputImageSize, outputImage);
    }
}
/// <summary>
/// Trains the spatial pooler on the input image and writes the results to text files
/// in the folder @"/Output". The result files contain a speed comparison between
/// global and local inhibition and the stability of the output array (measured by
/// comparing Hamming distances between iterations). Finally, the method draws the
/// active columns as a .png image.
/// </summary>
/// <param name="imageSize">Size of the image (width and height are equal)</param>
/// <param name="columnDimension">Dimension of the sparse column space (width and height are equal)</param>
/// <param name="inputBinarizedFile">Input image after binarization</param>
/// <param name="hammingFile">Path to the Hamming distance output file</param>
/// <param name="outputSpeedFile">Path to the speed comparison output file</param>
/// <param name="activeColumnFile">Path to the active-column output file (array as text)</param>
/// <param name="outputImage">Path to the active-column output file (.png image)</param>
/// <param name="isGlobalInhibition">True to use global inhibition; false to use local inhibition</param>
private static void Training(int imageSize, int columnDimension, string inputBinarizedFile, string hammingFile, string outputSpeedFile, string activeColumnFile, string outputImage, bool isGlobalInhibition)
{
    int outputImageSize = 1024;
    int activeColumn = columnDimension * columnDimension;
    var stopwatch = new Stopwatch();

    using (StreamWriter swHamming = new StreamWriter(hammingFile))
    using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
    using (StreamWriter swActiveColumn = new StreamWriter(activeColumnFile))
    {
        var parameters = SetupParameters(imageSize, columnDimension, isGlobalInhibition);

        var sp = new SpatialPooler();
        var mem = new Connections();
        parameters.apply(mem);

        // Measure initialization time.
        stopwatch.Start();
        sp.Init(mem);
        stopwatch.Stop();

        int[] activeArray = new int[activeColumn];

        // Read the binarized input image into an array.
        int[] inputVector = NeoCortexUtils.ReadCsvFileTest(inputBinarizedFile).ToArray();

        stopwatch.Restart();

        int iterations = 300;
        int[] oldArray = new int[activeArray.Length];

        for (int k = 0; k < iterations; k++)
        {
            sp.compute(inputVector, activeArray, true);

            var activeCols = activeArray.IndexWhere((el) => el == 1);
            var distance = MathHelpers.GetHammingDistance(oldArray, activeArray);
            var similarity = MathHelpers.CalcArraySimilarity(oldArray, activeArray);
            swHamming.WriteLine($"{distance} | {similarity}");

            var str = Helpers.StringifyVector(activeCols);
            Debug.WriteLine(str);

            oldArray = new int[activeArray.Length];
            activeArray.CopyTo(oldArray, 0);
        }

        stopwatch.Stop();

        var activeArrayString = Helpers.StringifyVector(activeArray);
        swActiveColumn.WriteLine("Active Array: " + activeArrayString);

        string inhibition = isGlobalInhibition ? "Global" : "Local";
        double milliseconds = stopwatch.ElapsedMilliseconds;
        double seconds = milliseconds / 1000;
        swSpeed.WriteLine($"Topology: {columnDimension.ToString().PadRight(5)} | Inhibition type: {inhibition.PadRight(7)} | Total time: {milliseconds:N0} milliseconds ({seconds:N2} seconds).");

        int[,] twoDimenArray = ArrayUtils.Make2DArray(activeArray, columnDimension, columnDimension);
        twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
        NeoCortexUtils.DrawBitmap(twoDimenArray, outputImageSize, outputImageSize, outputImage);
    }
}
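// For reference, a hedged example of how the two Training overloads above might
// be invoked from a test. The file names and sizes are illustrative, not taken
// from the original source; SetupParameters is the helper referenced elsewhere
// in this file, and the named argument assumes its parameter name.
[TestMethod]
public void TrainingSmokeTest()
{
    // Overload taking explicit parameters (e.g. for duty-cycle experiments).
    var parameters = SetupParameters(32, 64, true);
    Training("Output\\digit7_binarized.csv", "OutputDutyCycle\\hamming.txt",
        "OutputDutyCycle\\speed.txt", "OutputDutyCycle\\activeColumns.png", parameters);

    // Overload that builds its own parameters from image size and column dimension.
    Training(32, 64, "Output\\digit7_binarized.csv", "Output\\hamming.txt",
        "Output\\speed.txt", "Output\\activeColumns.txt", "Output\\activeColumns.png", true);
}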
public void SerializationTestWithTrainedData()
{
    // TODO: See SpatialPooler_Stability_Experiment_3() and use it here.
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 16 * 16 });
    parameters.setColumnDimensions(new int[] { 32 * 32 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 32 * 32);
    parameters.setMinPctOverlapDutyCycles(0.01);

    var mem = new Connections();
    parameters.apply(mem);

    var sp1 = new SpatialPooler();
    sp1.Init(mem);

    int[] output = new int[32 * 32];
    int[] inputVector = Helpers.GetRandomVector(16 * 16, parameters.Get<Random>(KEY.RANDOM));

    /* Alternative fixed input vector:
     * int[] inputVector = {
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0 };
     */

    string str1 = String.Empty;

    for (int i = 0; i < 5; i++)
    {
        sp1.compute(inputVector, output, true);

        var activeCols1 = ArrayUtils.IndexWhere(output, (el) => el == 1);

        // Remember the SDR for every input, e.g.:
        // if (i >= 4)
        //     sdrs.Add(output);

        str1 = Helpers.StringifyVector(activeCols1);
        Debug.WriteLine(str1);
    }

    // Once the output is stable, serialize and reload the pooler, e.g.:
    // HtmSerializer ser = new HtmSerializer();
    // ser.Serialize(sp, "sp.json");
    // var sp2 = HtmSerializer.Load("sp.json");
    // Then compare the SDRs produced by sp2 with those learned by sp1.

    // Note: spTrain1.json is produced by the serializer variant of this test;
    // this read fails if that test has not run first.
    string ser1 = File.ReadAllText("spTrain1.json");

    //var sp2 = SpatialPooler.Deserializer("spTrain1.json");
    //sp2.Serializer("spTrainDes1.json");
    //string des1 = File.ReadAllText("spTrainDes1.json");
    //Assert.IsTrue(ser1.SequenceEqual(des1));

    //for (int i = 5; i < 10; i++)
    //{
    //    sp1.compute(inputVector, sdrArray, false);
    //    var sdr2 = ArrayUtils.IndexWhere(sdrArray, (el) => el == 1);

    //    // Compare sdr with sdr1 of the same input.
    //    var str2 = Helpers.StringifyVector(sdr2);
    //    Debug.WriteLine(str2);
    //    Assert.IsTrue(str1.SequenceEqual(str2));
    //}
}
public void TestMethod2()
{
    var parameters = Helpers.GetDefaultParams();

    var imageWidth = 60;
    var imageHeight = 60;
    int outputWidth = 120;
    int outputHeight = 120;
    String trainingImageName = "Lamp.png";
    String predictionImageNameShift = "Lamp75Shift.PNG";
    bool withTraining = true;

    StringBuilder reportFile = new StringBuilder();
    reportFile.AppendLine($"Given Image Width: {imageWidth}");
    reportFile.AppendLine($"Given Image Height: {imageHeight}");
    reportFile.AppendLine($"Given Output Column Width: {outputWidth}");
    reportFile.AppendLine($"Given Output Column Height: {outputHeight}");

    parameters.setInputDimensions(new int[] { imageWidth, imageHeight });
    parameters.setColumnDimensions(new int[] { outputWidth, outputHeight });
    parameters.setNumActiveColumnsPerInhArea(0.02 * outputWidth * outputHeight);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[outputWidth * outputHeight];
    int[] inputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
    int[] newActiveArray = new int[outputWidth * outputHeight];

    double[][] newActiveArrayDouble = new double[1][];
    newActiveArrayDouble[0] = new double[newActiveArray.Length];

    if (withTraining)
    {
        // Training
        sp.compute(mem, inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

        double[][] oldActiveArrayDouble = new double[1][];
        oldActiveArrayDouble[0] = new double[activeArray.Length];
        for (int i = 0; i < activeArray.Length; i++)
        {
            oldActiveArrayDouble[0][i] = activeArray[i];
        }

        // Train until two consecutive outputs are identical (100% similarity).
        int isTrained = 0;
        while (isTrained == 0)
        {
            sp.compute(mem, inputVector, newActiveArray, true);
            activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);

            for (int i = 0; i < newActiveArray.Length; i++)
            {
                newActiveArrayDouble[0][i] = newActiveArray[i];
            }

            if (GetHammingDistances(oldActiveArrayDouble, newActiveArrayDouble, true)[0] == 100)
            {
                isTrained = 1;
            }
            else
            {
                isTrained = 0;
                // Copy instead of aliasing. Assigning the reference (as the
                // original code did) would make both variables point to the same
                // buffer, so the next comparison would compare the buffer with
                // itself and end the loop regardless of stability.
                oldActiveArrayDouble[0] = (double[])newActiveArrayDouble[0].Clone();
            }
        }

        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Trained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Trained Image({trainingImageName}): {activeCols.Length}");
    }
    else
    {
        // Without training
        sp.compute(mem, inputVector, newActiveArray, false);
        var activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Untrained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Untrained Image({trainingImageName}): {activeCols.Length}");
    }

    // Prediction with shift
    int[] inputVectorShift = ReadImageData(predictionImageNameShift, imageHeight, imageWidth);
    int[] activeArrayShift = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorShift, activeArrayShift, false);
    var resActiveColsShift = ArrayUtils.IndexWhere(activeArrayShift, (el) => el == 1);
    var resStrShift = Helpers.StringifyVector(resActiveColsShift);

    for (int i = 0; i < newActiveArray.Length; i++)
    {
        newActiveArrayDouble[0][i] = newActiveArray[i];
    }

    double[][] activeArrayShiftDouble = new double[1][];
    activeArrayShiftDouble[0] = new double[activeArrayShift.Length];
    for (int i = 0; i < activeArrayShift.Length; i++)
    {
        activeArrayShiftDouble[0][i] = activeArrayShift[i];
    }

    double hammingDistancePercentage = GetHammingDistances(newActiveArrayDouble, activeArrayShiftDouble, true)[0];

    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}):");
    reportFile.AppendLine(resStrShift);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}): {resActiveColsShift.Length}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {100 - hammingDistancePercentage}%");
    reportFile.AppendLine($"Output Overlap for Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): Output Overlap: {hammingDistancePercentage / 100}");

    // File.CreateText does not create the Output directory; ensure it exists.
    Directory.CreateDirectory(Path.Combine(AppContext.BaseDirectory, "Output"));
    using (StreamWriter writer = File.CreateText(Path.Combine(AppContext.BaseDirectory, $"Output/report-{trainingImageName}.txt")))
    {
        writer.Write(reportFile.ToString());
    }
}
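// GetHammingDistances is a local helper not shown in this file. A plausible
// sketch, inferred from how the test interprets its result (100 means the two
// rows are identical); the name of the bool parameter and the non-percentage
// branch are assumptions, and the real helper may differ.
private static double[] GetHammingDistances(double[][] a, double[][] b, bool asPercentage)
{
    double[] result = new double[a.Length];
    for (int row = 0; row < a.Length; row++)
    {
        // Count positions where the two rows agree.
        int matches = 0;
        for (int i = 0; i < a[row].Length; i++)
        {
            if (a[row][i] == b[row][i]) matches++;
        }
        result[row] = asPercentage
            ? 100.0 * matches / a[row].Length   // 100 = identical rows
            : a[row].Length - matches;          // raw count of differing positions
    }
    return result;
}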
public void TestMethod1()
{
    var parameters = Helpers.GetDefaultParams();

    var imageWidth = 30;
    var imageHeight = 30;
    int outputWidth = 60;
    int outputHeight = 60;
    String trainingImageName = "Lamp25Shift.PNG";
    String predictionImageNameShift = "Lamp25Shift.PNG";
    String predictionImageNameRotate = "LampRotate.PNG";
    String predictionImageNameDifferent = "Fish.PNG";
    bool withTraining = true;

    StringBuilder reportFile = new StringBuilder();
    reportFile.AppendLine($"Given Image Width: {imageWidth}");
    reportFile.AppendLine($"Given Image Height: {imageHeight}");
    reportFile.AppendLine($"Given Output Column Width: {outputWidth}");
    reportFile.AppendLine($"Given Output Column Height: {outputHeight}");

    parameters.setInputDimensions(new int[] { imageWidth, imageHeight });
    parameters.setColumnDimensions(new int[] { outputWidth, outputHeight });
    parameters.setNumActiveColumnsPerInhArea(0.02 * outputWidth * outputHeight);

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    int[] activeArray = new int[outputWidth * outputHeight];
    int[] inputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
    int[] newActiveArray = new int[outputWidth * outputHeight];

    if (withTraining)
    {
        // Training
        sp.compute(mem, inputVector, activeArray, true);
        var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

        int[] oldActiveArray = activeArray;

        // Train until two consecutive outputs are identical (Hamming distance 0).
        int flag = 0;
        while (flag == 0)
        {
            sp.compute(mem, inputVector, newActiveArray, true);
            activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);

            if (GetHammingDistance(oldActiveArray, newActiveArray) == 0)
            {
                flag = 1;
            }
            else
            {
                flag = 0;
                // Copy instead of aliasing. Assigning the reference (as the
                // original code did) would compare the buffer with itself on the
                // next pass and stop immediately.
                oldActiveArray = (int[])newActiveArray.Clone();
            }
        }

        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Trained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Trained Image({trainingImageName}): {activeCols.Length}");
    }
    else
    {
        // Without training
        sp.compute(mem, inputVector, newActiveArray, false);
        var activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);
        var str = Helpers.StringifyVector(activeCols);
        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Untrained Image({trainingImageName}):");
        reportFile.AppendLine(str);
        reportFile.AppendLine($"Number of Active Columns of Untrained Image({trainingImageName}): {activeCols.Length}");
    }

    // Prediction with shift
    int[] inputVectorShift = ReadImageData(predictionImageNameShift, imageHeight, imageWidth);
    int[] activeArrayShift = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorShift, activeArrayShift, false);
    var resActiveColsShift = ArrayUtils.IndexWhere(activeArrayShift, (el) => el == 1);
    var resStrShift = Helpers.StringifyVector(resActiveColsShift);
    int hamDistShift = GetHammingDistance(newActiveArray, activeArrayShift);

    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}):");
    reportFile.AppendLine(resStrShift);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with Shift({predictionImageNameShift}): {resActiveColsShift.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {hamDistShift}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Shift({predictionImageNameShift}): {Math.Round((double)(hamDistShift * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with 90-degree rotation
    int[] inputVectorR90 = ReadImageData(predictionImageNameRotate, imageHeight, imageWidth);
    int[] activeArrayR90 = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorR90, activeArrayR90, false);
    var resActiveColsR90 = ArrayUtils.IndexWhere(activeArrayR90, (el) => el == 1);
    var resStrR90 = Helpers.StringifyVector(resActiveColsR90);
    int hamDistR90 = GetHammingDistance(newActiveArray, activeArrayR90);

    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Trained Image with 90 degrees rotate({predictionImageNameRotate}):");
    reportFile.AppendLine(resStrR90);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image with 90 degrees rotate({predictionImageNameRotate}): {resActiveColsR90.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with 90 degrees rotate({predictionImageNameRotate}): {hamDistR90}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with 90 degrees rotate({predictionImageNameRotate}): {Math.Round((double)(hamDistR90 * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with a different image
    int[] inputVectorDifferent = ReadImageData(predictionImageNameDifferent, imageHeight, imageWidth);
    int[] activeArrayDifferent = new int[outputWidth * outputHeight];
    sp.compute(mem, inputVectorDifferent, activeArrayDifferent, false);
    var resActiveColsDifferent = ArrayUtils.IndexWhere(activeArrayDifferent, (el) => el == 1);
    var resStrDifferent = Helpers.StringifyVector(resActiveColsDifferent);
    int hamDistDifferent = GetHammingDistance(newActiveArray, activeArrayDifferent);

    reportFile.AppendLine();
    reportFile.AppendLine($"Active Columns of Prediction of Different Image than Trained Image({predictionImageNameDifferent}):");
    reportFile.AppendLine(resStrDifferent);
    reportFile.AppendLine($"Number of Active Columns of Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {resActiveColsDifferent.Length}");
    reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {hamDistDifferent}");
    reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of Different Image than Trained Image({predictionImageNameDifferent}): {Math.Round((double)(hamDistDifferent * 100) / (outputWidth * outputHeight), 4)}%");

    // Prediction with noise of the given percentages
    double[] givenNoisePercentages = { 0, 1, 5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };

    for (int i = 0; i < givenNoisePercentages.Length; i++)
    {
        int[] oldInputVector = ReadImageData(trainingImageName, imageHeight, imageWidth);
        int[] inputVectorNoisePercent = MakeSomeNoise(oldInputVector, givenNoisePercentages[i] / 100);
        int[] activeArrayNoisePercent = new int[outputWidth * outputHeight];

        MakeBinaryFile(inputVectorNoisePercent, imageHeight, imageWidth, $"{trainingImageName}-Noise{givenNoisePercentages[i]}p");
        sp.compute(mem, inputVectorNoisePercent, activeArrayNoisePercent, false);
        var resActiveColsPercent = ArrayUtils.IndexWhere(activeArrayNoisePercent, (el) => el == 1);
        var resStrPercent = Helpers.StringifyVector(resActiveColsPercent);
        int hamDistNoisePercent = GetHammingDistance(newActiveArray, activeArrayNoisePercent);

        reportFile.AppendLine();
        reportFile.AppendLine($"Active Columns of Prediction of Trained Image({trainingImageName}) with Noise of {givenNoisePercentages[i]} percent:");
        reportFile.AppendLine(resStrPercent);
        reportFile.AppendLine($"Number of Active Columns of Prediction of Trained Image({trainingImageName}) with Noise of {givenNoisePercentages[i]} percent: {resActiveColsPercent.Length}");
        reportFile.AppendLine($"Hamming Distance between Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: {hamDistNoisePercent}");
        reportFile.AppendLine($"Hamming Distance in % between Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: {Math.Round((double)(hamDistNoisePercent * 100) / (outputWidth * outputHeight), 4)}%");
        reportFile.AppendLine($"Output Overlap versus Input Overlap for Trained Image({trainingImageName}) and Prediction of same Image with Noise of {givenNoisePercentages[i]} percent: Output Overlap: {1 - Math.Round((double)(hamDistNoisePercent * 100) / (outputWidth * outputHeight), 4) / 100}, Input Overlap: {1 - givenNoisePercentages[i] / 100}");
    }

    // File.CreateText does not create the Output directory; ensure it exists.
    Directory.CreateDirectory(Path.Combine(AppContext.BaseDirectory, "Output"));
    using (StreamWriter writer = File.CreateText(Path.Combine(AppContext.BaseDirectory, $"Output/report-{trainingImageName}.txt")))
    {
        writer.Write(reportFile.ToString());
    }
}
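// MakeSomeNoise is not shown in this file. A plausible, hypothetical sketch,
// assuming it flips the requested fraction of randomly chosen bits in a copy of
// the vector; the name and exact behavior are inferred from the call site above
// and may differ from the real implementation. Requires System.Linq.
private static int[] MakeSomeNoise(int[] vector, double noiseFraction)
{
    var random = new Random();
    int[] noisy = (int[])vector.Clone();
    int bitsToFlip = (int)(vector.Length * noiseFraction);

    // Pick bitsToFlip distinct random positions and flip each bit.
    foreach (int index in Enumerable.Range(0, vector.Length)
                                    .OrderBy(_ => random.Next())
                                    .Take(bitsToFlip))
    {
        noisy[index] = noisy[index] == 1 ? 0 : 1;
    }

    return noisy;
}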
// Note: the data row below does not match the current signature, which takes a
// second image path and int[] arrays for imageSize and topologies.
// [DataRow("MnistTestImages\\digit7.png", 128, 30)]
public void LearningimageDoubleShiftStble(string mnistImage, string shiftedImage, int[] imageSize, int[] topologies)
{
    var path = Path.Combine(Directory.GetParent(Environment.CurrentDirectory).Parent.Parent.FullName, mnistImage);
    var pathShifted = Path.Combine(Directory.GetParent(Environment.CurrentDirectory).Parent.Parent.FullName, shiftedImage);

    Console.WriteLine("Test started");
    Console.WriteLine(mnistImage);
    Console.WriteLine(shiftedImage);

    const int OutImgSize = 1024;

    // Extract the bare image name (without folder and extension) from both paths.
    int index1 = mnistImage.IndexOf("\\") + 1;
    int index2 = mnistImage.IndexOf(".");
    string sub1 = mnistImage.Substring(0, index2);
    string sub2 = mnistImage.Substring(0, index1);
    string name = mnistImage.Substring(index1, sub1.Length - sub2.Length);

    int index1Shift = shiftedImage.IndexOf("\\") + 1;
    int index2Shift = shiftedImage.IndexOf(".");
    string sub1Shift = shiftedImage.Substring(0, index2Shift);
    string sub2Shift = shiftedImage.Substring(0, index1Shift);
    string nameShift = shiftedImage.Substring(index1Shift, sub1Shift.Length - sub2Shift.Length);

    for (int imSizeIndx = 0; imSizeIndx < imageSize.Length; imSizeIndx++)
    {
        Console.WriteLine(String.Format("Image Size: \t{0}", imageSize[imSizeIndx]));

        string testName = $"{name}_{imageSize[imSizeIndx]}";
        string outputSpeedFile = $"Output\\{testName}_speed.txt";
        string testNameShifted = $"{nameShift}_{imageSize[imSizeIndx]}";
        string outputSpeedFileShifted = $"Output\\{testNameShifted}_speed.txt";

        string inputBinaryImageFile = BinarizeImage(path, imageSize[imSizeIndx], testName);
        string inputBinaryImageShiftedFile = BinarizeImage(pathShifted, imageSize[imSizeIndx], testNameShifted);

        for (int topologyIndx = 0; topologyIndx < topologies.Length; topologyIndx++)
        {
            Console.WriteLine(String.Format("Topology: \t{0}", topologies[topologyIndx]));

            string finalName = $"{testName}_{topologies[topologyIndx]}";
            string outputHamDistFile = $"Output\\{finalName}_hamming.txt";
            string outputActColFile = $"Output\\{finalName}_activeCol.txt";
            string outputImage = $"Output\\{finalName}.png";

            string finalNameShifted = $"{testNameShifted}_{topologies[topologyIndx]}";
            string outputHamDistFileShifted = $"Output\\{finalNameShifted}_hamming.txt";
            string outputActColFileShifted = $"Output\\{finalNameShifted}_activeCol.txt";
            string outputImageShifted = $"Output\\{finalNameShifted}.png";

            // The StreamWriters below require the output folder to exist.
            Directory.CreateDirectory("Output");

            int numOfActCols = 0;
            var sw = new Stopwatch();

            using (StreamWriter swHam = new StreamWriter(outputHamDistFile))
            using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
            using (StreamWriter swActCol = new StreamWriter(outputActColFile))
            {
                numOfActCols = topologies[topologyIndx] * topologies[topologyIndx];

                var parameters = GetDefaultParams();
                parameters.setInputDimensions(new int[] { imageSize[imSizeIndx], imageSize[imSizeIndx] });
                parameters.setColumnDimensions(new int[] { topologies[topologyIndx], topologies[topologyIndx] });
                parameters.setNumActiveColumnsPerInhArea(0.02 * numOfActCols);

                var sp = new SpatialPooler();
                var mem = new Connections();
                parameters.apply(mem);

                sw.Start();
                sp.Init(mem);
                sw.Stop();
                swSpeed.WriteLine($"{topologies[topologyIndx]}|{(double)sw.ElapsedMilliseconds / (double)1000}");

                int[] activeArray = new int[numOfActCols];

                // Read the binarized input image into an array.
                int[] inputVector = NeoCortexUtils.ReadCsvIntegers(inputBinaryImageFile).ToArray();

                var inputOverlap = new List<double>();
                var outputOverlap = new List<double>();

                int[] newActiveArray = new int[topologies[topologyIndx] * topologies[topologyIndx]];
                double[][] newActiveArrayDouble = new double[1][];
                newActiveArrayDouble[0] = new double[newActiveArray.Length];

                // Training
                sp.compute(inputVector, activeArray, true);
                var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);

                double[][] oldActiveArray = new double[1][];
                oldActiveArray[0] = new double[activeArray.Length];
                for (int a = 0; a < activeArray.Length; a++)
                {
                    oldActiveArray[0][a] = activeArray[a];
                }

                // Train until two consecutive outputs are identical (100% similarity).
                int isTrained = 0;
                while (isTrained == 0)
                {
                    sp.compute(inputVector, newActiveArray, true);
                    activeCols = ArrayUtils.IndexWhere(newActiveArray, (el) => el == 1);

                    for (int a = 0; a < newActiveArray.Length; a++)
                    {
                        newActiveArrayDouble[0][a] = newActiveArray[a];
                    }

                    if (MathHelpers.GetHammingDistance(oldActiveArray, newActiveArrayDouble, true)[0] == 100)
                    {
                        isTrained = 1;
                    }
                    else
                    {
                        isTrained = 0;
                        // Copy instead of aliasing. Assigning the reference (as
                        // the original code did) would compare the buffer with
                        // itself on the next pass and stop immediately.
                        oldActiveArray[0] = (double[])newActiveArrayDouble[0].Clone();
                    }
                }

                var str = Helpers.StringifyVector(activeCols);

                int[] oldInputVector = NeoCortexUtils.ReadCsvIntegers(inputBinaryImageFile).ToArray();
                int[] inputVectorShifted = NeoCortexUtils.ReadCsvIntegers(inputBinaryImageShiftedFile).ToArray();
                int[] activeArrayShifted = new int[topologies[topologyIndx] * topologies[topologyIndx]];
                double[][] activeArrayShiftedDouble = new double[1][];
                activeArrayShiftedDouble[0] = new double[activeArrayShifted.Length];

                sw.Restart();

                // Prediction
                sp.compute(inputVectorShifted, activeArrayShifted, false);
                var resActiveColsPercent = ArrayUtils.IndexWhere(activeArrayShifted, (el) => el == 1);
                var resStrPercent = Helpers.StringifyVector(activeArrayShifted);

                for (int a = 0; a < activeArrayShifted.Length; a++)
                {
                    activeArrayShiftedDouble[0][a] = activeArrayShifted[a];
                }

                var distance = MathHelpers.GetHammingDistance(newActiveArrayDouble, activeArrayShiftedDouble, false)[0];
                var distPercent = ((100 - distance) * 100) / (topologies[topologyIndx] * topologies[topologyIndx]);
                swHam.WriteLine(distance + "\t" + (100 - distance) + "\t" + distPercent + "\t" + (1 - (distPercent / 100.0)) + "\n");
                outputOverlap.Add(1 - (distPercent / 100.0));

                swActCol.WriteLine(String.Format(@"Active Cols: {0}", Helpers.StringifyVector(resActiveColsPercent)));
                Console.WriteLine(resStrPercent);
                swActCol.WriteLine("Active Array: " + resStrPercent);

                sw.Stop();

                int[,] twoDimenArray = ArrayUtils.Make2DArray<int>(activeArrayShifted, topologies[topologyIndx], topologies[topologyIndx]);
                twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
                NeoCortexUtils.DrawBitmap(twoDimenArray, OutImgSize, OutImgSize, outputImage);

                swActCol.WriteLine("inputOverlaps: " + Helpers.StringifyVector(inputOverlap.ToArray()));
                swActCol.WriteLine("outputOverlaps: " + Helpers.StringifyVector(outputOverlap.ToArray()));
            }
        }
    }
}
// Note: the data row below does not match the current signature, which takes
// int[] arrays for imageSize and topologies.
//[DataRow("MnistTestImages\\digit7.png", 128, 30)]
public void CalculateSpeedOfLearningTest(string mnistImage, int[] imageSize, int[] topologies)
{
    // Extract the bare image name (without folder and extension).
    int index1 = mnistImage.IndexOf("\\") + 1;
    int index2 = mnistImage.IndexOf(".");
    string sub1 = mnistImage.Substring(0, index2);
    string sub2 = mnistImage.Substring(0, index1);
    string name = mnistImage.Substring(index1, sub1.Length - sub2.Length);

    for (int imSizeIndx = 0; imSizeIndx < imageSize.Length; imSizeIndx++)
    {
        string testName = $"{name}_{imageSize[imSizeIndx]}";
        string outputSpeedFile = $"Output\\{testName}_speed.txt";
        string inputBinaryImageFile = BinarizeImage("Output\\" + mnistImage, imageSize[imSizeIndx], testName);

        for (int topologyIndx = 0; topologyIndx < topologies.Length; topologyIndx++)
        {
            string finalName = $"{testName}_{topologies[topologyIndx]}";
            string outputHamDistFile = $"Output\\{finalName}_hamming.txt";
            string outputActColFile = $"Output\\{finalName}_activeCol.txt";
            string outputImage = $"Output\\{finalName}.png";

            int numOfActCols = 0;
            var sw = new Stopwatch();

            using (StreamWriter swHam = new StreamWriter(outputHamDistFile))
            using (StreamWriter swSpeed = new StreamWriter(outputSpeedFile, true))
            using (StreamWriter swActCol = new StreamWriter(outputActColFile))
            {
                numOfActCols = topologies[topologyIndx] * topologies[topologyIndx];

                var parameters = GetDefaultParams();
                parameters.setInputDimensions(new int[] { imageSize[imSizeIndx], imageSize[imSizeIndx] });
                parameters.setColumnDimensions(new int[] { topologies[topologyIndx], topologies[topologyIndx] });
                parameters.setNumActiveColumnsPerInhArea(0.02 * numOfActCols);

                var sp = new SpatialPooler();
                var mem = new Connections();
                parameters.apply(mem);

                sw.Start();
                sp.Init(mem);
                sw.Stop();
                swSpeed.WriteLine($"{topologies[topologyIndx]}|{(double)sw.ElapsedMilliseconds / (double)1000}");

                int[] activeArray = new int[numOfActCols];

                // Read the binarized input image into an array.
                int[] inputVector = NeoCortexUtils.ReadCsvIntegers(inputBinaryImageFile).ToArray();

                sw.Restart();

                int iterations = 2;
                int[] oldArray = new int[activeArray.Length];

                for (int k = 0; k < iterations; k++)
                {
                    sp.compute(inputVector, activeArray, true);

                    var activeCols = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
                    var distance = MathHelpers.GetHammingDistance(oldArray, activeArray);
                    swHam.WriteLine(distance + "\n");

                    var str = Helpers.StringifyVector(activeCols);

                    oldArray = new int[activeArray.Length];
                    activeArray.CopyTo(oldArray, 0);
                }

                var activeStr = Helpers.StringifyVector(activeArray);
                swActCol.WriteLine("Active Array: " + activeStr);

                sw.Stop();

                int[,] twoDimenArray = ArrayUtils.Make2DArray<int>(activeArray, topologies[topologyIndx], topologies[topologyIndx]);
                twoDimenArray = ArrayUtils.Transpose(twoDimenArray);
                NeoCortexUtils.DrawBitmap(twoDimenArray, OutImgSize, OutImgSize, outputImage);
            }
        }
    }
}
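// The commented-out DataRow above passes scalars, while the method expects
// arrays. With MSTest, array arguments can be supplied directly in the
// attribute; a hedged example (the path and values are illustrative, not
// taken from the original source):
[TestMethod]
[DataRow("MnistTestImages\\digit7.png", new int[] { 128 }, new int[] { 30 })]
public void CalculateSpeedOfLearningTestRunner(string mnistImage, int[] imageSize, int[] topologies)
{
    CalculateSpeedOfLearningTest(mnistImage, imageSize, topologies);
}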
public void TestMethod1()
{
    // Number of binary vectors that will be used to train the SP.
    int numExamples = 10;

    // 2-dimensional input vector; each row is used as a one-dimensional vector.
    int[,] inputVectors = new int[numExamples, 1000];

    // 2-dimensional output binary vector.
    int[,] outputColumns = new int[numExamples, 2048];

    // Fill the input vectors with random binary numbers. A single Random
    // instance is used; the original created one per iteration, which reuses
    // time-based seeds and produces repeated values.
    var random = new Random();
    for (int i = 0; i < numExamples; i++)
    {
        for (int k = 0; k < 1000; k++)
        {
            inputVectors[i, k] = random.Next(0, 2);
        }
    }

    // Spatial pooler initialization and configuration.
    var parameters = Helpers.GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    // One-dimensional input vector initialized with binary numbers.
    int[] inputVector = Helpers.GetRandomVector(1000, parameters.Get<Random>(KEY.RANDOM));

    // Array the SP uses to mark its active columns.
    int[] activeCols = new int[2048];

    // List for the overlap scores of the active columns.
    List<int> activeColScores = new List<int>();

    // Learning is turned off.
    sp.compute(mem, inputVector, activeCols, false);

    // Overlap scores of all columns for this input.
    var overlaps = sp.calculateOverlap(mem, inputVector);

    // Collect the overlap score of each active column before sorting, so that
    // the indices still correspond to columns. (The original collected them
    // after sorting, which breaks that correspondence.)
    for (int i = 0; i < 2048; i++)
    {
        if (activeCols[i] != 0)
        {
            activeColScores.Add(overlaps[i]);
        }
    }

    // Sort the overlaps in descending order and write them to
    // overlapBeforeTraining.txt.
    overlaps = reverseSort(overlaps);
    WriteIntArrayToFile("overlapBeforeTraining.txt", overlaps);

    // Number of active columns, written to a file for plotting.
    int numberOfActivCols = activeColScores.Count;
    int[] numberOfActiveColumns = new int[] { numberOfActivCols };

    int[] inputVectorsRowk = new int[] { };
    int[] outputColumnsRowk = new int[] { };

    // Number of times the vectors are exposed to the SP.
    int epochs = 1;

    // Expose the numExamples input binary vectors to the SP "epochs" times to train it.
    for (int i = 0; i < epochs; i++)
    {
        for (int k = 0; k < numExamples; k++)
        {
            inputVectorsRowk = GetRow(inputVectors, k);
            outputColumnsRowk = GetRow(outputColumns, k);
            sp.compute(mem, inputVectorsRowk, outputColumnsRowk, true);
        }
    }

    overlaps = sp.calculateOverlap(mem, inputVectorsRowk);
    overlaps = reverseSort(overlaps);
    WriteIntArrayToFile("overlapAfterTraining.txt", overlaps);
    WriteIntArrayToFile("numberOfActCols.txt", numberOfActiveColumns);

    // Overlaps before and after training plus numberOfActCols are needed for the graphs.
    runPythonCode2("overlapBeforeTraining.txt", "overlapAfterTraining.txt", "numberOfActCols.txt");

    int[,] inputVectorsCorrupted = new int[numExamples, 1000];
    int[,] outputColumnsCorrupted = new int[numExamples, 2048];

    // Noise levels in percent.
    float[] Noise = new float[] { 0, 5, 10, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
    double[] percentOverlapInputs = new double[Noise.Length];
    double[] percentOverlapOutputs = new double[Noise.Length];

    int[] inputVectorsCorruptedRow0 = GetRow(inputVectorsCorrupted, 0);
    int[] inputVectorsRow0 = GetRow(inputVectors, 0);
    int[] outputColumnsRow0 = GetRow(outputColumns, 0);
    int[] outputColumnsCorruptedRow0 = GetRow(outputColumnsCorrupted, 0);

    resetVector(inputVectorsRow0, inputVectorsCorruptedRow0);

    for (int i = 0; i < Noise.Length; i++)
    {
        inputVectorsCorruptedRow0 = corruptVector(ref inputVectorsRow0, Noise[i]);

        sp.compute(mem, inputVectorsRow0, outputColumnsRow0, false);
        sp.compute(mem, inputVectorsCorruptedRow0, outputColumnsCorruptedRow0, false);

        // Compute the input and output overlaps and store the results.
        percentOverlapInputs[i] = percentOverlap(inputVectorsRow0, inputVectorsCorruptedRow0);
        percentOverlapOutputs[i] = percentOverlap(outputColumnsRow0, outputColumnsCorruptedRow0);
    }

    WriteDoubleArrayToFile("percentOverlapInputs.txt", percentOverlapInputs);
    WriteDoubleArrayToFile("percentOverlapOutputs.txt", percentOverlapOutputs);
    runPythonCode1("percentOverlapInputs.txt", "percentOverlapOutputs.txt");
}
public void SerializationTestWithTrainedData()
{
    var parameters = GetDefaultParams();
    parameters.setInputDimensions(new int[] { 16 * 16 });
    parameters.setColumnDimensions(new int[] { 32 * 32 });
    parameters.setNumActiveColumnsPerInhArea(0.02 * 32 * 32);
    parameters.setMinPctOverlapDutyCycles(0.01);

    var mem = new Connections();
    parameters.apply(mem);

    var sp1 = new SpatialPooler();
    sp1.Init(mem);

    int[] activeArray = new int[32 * 32];
    int[] inputVector = Helpers.GetRandomVector(16 * 16, parameters.Get<Random>(KEY.RANDOM));

    /* Alternative fixed input vector:
     * int[] inputVector = {
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,1,1,1,1,1,1,1,1,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
     *     1,1,1,1,1,1,1,1,0,0,0,0,0,0,0,0 };
     */

    string str1 = String.Empty;

    for (int i = 0; i < 5; i++)
    {
        sp1.compute(inputVector, activeArray, true);

        var activeCols1 = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        str1 = Helpers.StringifyVector(activeCols1);
        Debug.WriteLine(str1);
    }

    /* Newtonsoft-based serialization alternative:
     * JsonSerializerSettings settings = new JsonSerializerSettings
     * {
     *     DefaultValueHandling = DefaultValueHandling.Include,
     *     ObjectCreationHandling = ObjectCreationHandling.Auto,
     *     ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
     *     ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor,
     *     TypeNameHandling = TypeNameHandling.Auto
     * };
     * var jsConverted = JsonConvert.SerializeObject(sp1, Formatting.Indented, settings);
     * string file2 = "spSerializeTrain-newtonsoft.json";
     * File.WriteAllText(file2, jsConverted);
     */

    // Serialize the trained pooler, reload it, and serialize it again; both
    // JSON files must be identical.
    sp1.Serializer("spTrain1.json");
    string ser1 = File.ReadAllText("spTrain1.json");

    var sp2 = SpatialPooler.Deserializer("spTrain1.json");
    sp2.Serializer("spTrainDes1.json");
    string des1 = File.ReadAllText("spTrainDes1.json");
    Assert.IsTrue(ser1.SequenceEqual(des1));

    // SpatialPooler sp2 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText(file2), settings);

    // The restored pooler must produce the same SDR for the same input.
    for (int i = 5; i < 10; i++)
    {
        sp2.compute(inputVector, activeArray, false);

        var activeCols2 = ArrayUtils.IndexWhere(activeArray, (el) => el == 1);
        var str2 = Helpers.StringifyVector(activeCols2);
        Debug.WriteLine(str2);
        Assert.IsTrue(str1.SequenceEqual(str2));
    }

    sp2.Serializer("spTrain2.json");
    string ser2 = File.ReadAllText("spTrain2.json");
    // Assert.IsTrue(ser2.SequenceEqual(des1));

    /* Second Newtonsoft round-trip comparison:
     * string serializedSecondPooler = JsonConvert.SerializeObject(sp2, Formatting.Indented, settings);
     * string fileSecondPooler = "spSerializeTrain-secondpooler-newtonsoft.json";
     * File.WriteAllText(fileSecondPooler, serializedSecondPooler);
     *
     * SpatialPooler sp3 = JsonConvert.DeserializeObject<SpatialPooler>(File.ReadAllText(fileSecondPooler), settings);
     * string serializedThirdPooler = JsonConvert.SerializeObject(sp3, Formatting.Indented, settings);
     *
     * Assert.IsTrue(serializedThirdPooler.SequenceEqual(serializedSecondPooler), "Third and second poolers are not equal");
     * Assert.IsTrue(jsConverted.SequenceEqual(serializedSecondPooler), "First and second poolers are not equal");
     */
}
public void TestMethod1()
{
    // Array initialization.
    int[] inputX1 = new int[1000];
    int[] inputX2 = new int[1000];
    int[] outputX1 = new int[2048];
    int[] outputX2 = new int[2048];

    // Fill inputX1 with random binary numbers. A single Random instance is
    // used; the original created one per iteration, which reuses time-based
    // seeds and produces repeated values.
    var random = new Random();
    for (int j = 0; j < 1000; j++)
    {
        inputX1[j] = random.Next(0, 2);
    }

    // Copy inputX1 to inputX2 to get two identical vectors.
    for (int i = 0; i < 1000; i++)
    {
        inputX2[i] = inputX1[i];
    }

    // Noise levels in percent.
    float[] Noise = new float[] { 0, 5, 10, 15, 20, 30, 40, 50, 60, 70, 80, 90, 100 };

    // Arrays where the input and output overlap results will be stored.
    double[] percentOverlapArrayInput = new double[Noise.Length];
    double[] percentOverlapArrayOutput = new double[Noise.Length];

    // Initialization and configuration of the spatial pooler.
    var parameters = Helpers.GetDefaultParams();
    parameters.setInputDimensions(new int[] { 1000 });
    parameters.setColumnDimensions(new int[] { 2048 });

    var sp = new SpatialPooler();
    var mem = new Connections();
    parameters.apply(mem);
    sp.init(mem);

    for (int j = 0; j < Noise.Length; j++)
    {
        // inputX1 and inputX2 are identical before the loop, so inputX1 serves
        // as the reference vector to which noise is added.
        inputX2 = corruptVector(ref inputX1, Noise[j]);

        // Feed both vectors to the spatial pooler (learning off).
        sp.compute(mem, inputX1, outputX1, false);
        sp.compute(mem, inputX2, outputX2, false);

        // Compute the input and output overlaps and store the results.
        percentOverlapArrayInput[j] = percentOverlap(inputX1, inputX2);
        percentOverlapArrayOutput[j] = percentOverlap(outputX1, outputX2);
    }

    // Write the input and output overlap arrays to text files.
    WriteArrayToFile("OverlapInput.txt", percentOverlapArrayInput);
    WriteArrayToFile("OverlapOutput.txt", percentOverlapArrayOutput);

    // Launch the create-plots-Part2b script, which plots the data in the files.
    runPythonCode("OverlapInput.txt", "OverlapOutput.txt");
}
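// percentOverlap and corruptVector are helpers defined elsewhere and used by
// the two noise-robustness tests above. Below are minimal, hypothetical
// sketches inferred from the call sites; the exact definitions (in particular
// the normalization used by percentOverlap) may differ. Requires System.Linq.

// Percentage of shared active bits, relative to the smaller number of active
// bits in either vector (one common definition of SDR overlap).
private static double percentOverlap(int[] x1, int[] x2)
{
    int shared = 0, ones1 = 0, ones2 = 0;
    for (int i = 0; i < x1.Length; i++)
    {
        if (x1[i] == 1) ones1++;
        if (x2[i] == 1) ones2++;
        if (x1[i] == 1 && x2[i] == 1) shared++;
    }
    int smaller = Math.Min(ones1, ones2);
    return smaller == 0 ? 0 : 100.0 * shared / smaller;
}

// Returns a copy of the reference vector with the given percentage of randomly
// chosen bits flipped; the reference vector itself is left unchanged.
private static int[] corruptVector(ref int[] vector, float noisePercent)
{
    var random = new Random();
    int[] corrupted = (int[])vector.Clone();
    int bitsToFlip = (int)(vector.Length * noisePercent / 100);

    foreach (int index in Enumerable.Range(0, vector.Length)
                                    .OrderBy(_ => random.Next())
                                    .Take(bitsToFlip))
    {
        corrupted[index] = 1 - corrupted[index]; // flip the bit
    }

    return corrupted;
}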