/// <summary>
/// Writes a per-dimension histogram of split importance — the percent of total node norm
/// explained by splits on each PCA component — to localPCAsplitHistogram.txt.
/// Only applies to forests built with the local-PCA split strategy (split_type == 5).
/// </summary>
/// <param name="rfTreeArr">Forest: one node list per tree; config is read from the first node of the first tree.</param>
/// <param name="analysisFolderName">Target directory for the histogram file.</param>
public static void printSplitByComponentHistogram(List<GeoWave>[] rfTreeArr, string analysisFolderName)
{
    // Config is shared by all nodes; take it from the first tree's first node.
    recordConfig rc = rfTreeArr.First().First().rc;

    // Relevant only for the local-PCA split strategy.
    if (rc.split_type != 5)
    {
        return;
    }

    // "using" guarantees the writer is flushed and closed even if a write throws
    // (the original leaked the stream on exception).
    using (StreamWriter sw = new StreamWriter(analysisFolderName + "\\localPCAsplitHistogram.txt", false))
    {
        // Accumulate, per split dimension, the total norm of the nodes split on it
        // (dimIndex == -1 marks nodes that were not split).
        double[] splitHist = new double[rc.dim];
        foreach (GeoWave node in from tree in rfTreeArr from node in tree where node.dimIndex != -1 select node)
        {
            splitHist[node.dimIndex] += node.norm;
        }

        double allNormSum = splitHist.Sum();
        foreach (double splitDimCounter in splitHist)
        {
            // Percent of total norm explained by this dimension, rounded to 3 decimals.
            sw.Write(Math.Round(100 * splitDimCounter / allNormSum, 3) + "\n");
        }
    }
}
/// <summary>
/// Builds a decision tree over the PCA-transformed training set held by the database.
/// </summary>
/// <param name="rc">Run configuration for the tree.</param>
/// <param name="db">Database providing the PCA data, grid indices and labels.</param>
public DecicionTree(recordConfig rc, DB db)
{
    _rc = rc;
    _trainingDt = db.PCAtraining_dt;
    _trainingGridIndexDt = db.PCAtraining_GridIndex_dt;
    _trainingLabel = db.training_label;
}
/// <summary>
/// Dumps the training and testing sets as comma-separated files readable by R:
/// one sample per row, a numeric 1..dim header, and a final label column.
/// </summary>
/// <param name="trainingData">Training samples (rows of features).</param>
/// <param name="testingData">Testing samples (rows of features).</param>
/// <param name="trainingLabel">One label per training row.</param>
/// <param name="testingLabel">One label per testing row.</param>
/// <param name="rc">Config providing the feature dimension for the header.</param>
/// <param name="analysisFolderName">Output directory for both files.</param>
public static void printDataInRFormat(double[][] trainingData, double[][] testingData, double[] trainingLabel, double[] testingLabel, recordConfig rc, string analysisFolderName)
{
    string testOutPath = analysisFolderName + "\\testDataR.txt";
    string trainOutPath = analysisFolderName + "\\trainDataR.txt";

    // "using" ensures both writers are closed even if a write throws
    // (the original leaked both streams on exception).
    using (StreamWriter testWriter = new StreamWriter(testOutPath, false))
    using (StreamWriter trainWriter = new StreamWriter(trainOutPath, false))
    {
        // Header: feature columns named 1..dim, then the label column.
        // ("responce" spelling is kept deliberately — consumers parse this exact header.)
        int[] rTrainHeading = Enumerable.Range(1, rc.dim).ToArray();
        string header = string.Join(",", rTrainHeading) + ",responce";
        trainWriter.WriteLine(header);
        testWriter.WriteLine(header);

        for (int i = 0; i < trainingData.Length; i++)
        {
            trainWriter.WriteLine(string.Join(",", trainingData[i]) + "," + trainingLabel[i]);
        }

        for (int i = 0; i < testingData.Length; i++)
        {
            testWriter.WriteLine(string.Join(",", testingData[i]) + "," + testingLabel[i]);
        }
    }
}
/// <summary>
/// Builds a decision tree directly from pre-loaded arrays, restricted to the
/// dimensions flagged true in dime2Take.
/// </summary>
public DecicionTree(recordConfig rc, double[][] trainingDt, double[][] trainingLabel, long[][] trainingGridIndexDt, bool[] dime2Take)
{
    _rc = rc;
    _dime2Take = dime2Take;
    _trainingDt = trainingDt;
    _trainingLabel = trainingLabel;
    _trainingGridIndexDt = trainingGridIndexDt;
}
/// <summary>
/// Builds a decision tree over the PCA-transformed training set held by the database,
/// restricted to the dimensions flagged true in dime2Take.
/// </summary>
public DecicionTree(recordConfig rc, DB db, bool[] dime2Take)
{
    _rc = rc;
    _dime2Take = dime2Take;
    _trainingDt = db.PCAtraining_dt;
    _trainingGridIndexDt = db.PCAtraining_GridIndex_dt;
    _trainingLabel = db.training_label;
}
/// <summary>
/// Prepares an impurity engine over the raw training set: caches data, labels and
/// grid indices from the database plus the relevant sizes from the configuration.
/// </summary>
/// <param name="rc">Config providing the data dimension and minimum node size.</param>
/// <param name="db">Database providing training data, labels and grid indices.</param>
/// <param name="mode">Impurity norm to use; defaults to Gini.</param>
public GiniEngine(recordConfig rc, DB db, NormType mode = NormType.Gini)
{
    _mode = mode;
    _dataDim = rc.dim;
    _minWaveSize = rc.minWaveSize;
    _training = db.training_dt;
    _trainingGridIndex = db.PCAtraining_GridIndex_dt;
    // Label dimension is taken from the first label row, so it must be read
    // after the labels themselves are cached.
    _labels = db.training_label;
    _labelsDim = _labels[0].Count();
}
/// <summary>
/// Creates a node whose bounding box is a deep copy of BOX
/// (2 rows — lower/upper grid indices — by rc.dim columns).
/// </summary>
/// <param name="BOX">Bounding box to copy into the node.</param>
/// <param name="labelDim">Dimension of the label vector stored per node.</param>
/// <param name="rc">Run configuration attached to the node.</param>
public GeoWave(int[][] BOX, int labelDim, recordConfig rc)
{
    upperSplitValue = -1;
    this.rc = rc;
    Init(rc.dim, labelDim);

    // Deep-copy the box element by element so later splits on this node
    // cannot mutate the caller's array.
    for (int row = 0; row < 2; row++)
    {
        for (int col = 0; col < rc.dim; col++)
        {
            boubdingBox[row][col] = BOX[row][col];
        }
    }
}
// ReSharper disable once InconsistentNaming
/// <summary>
/// Writes a histogram of original-feature importance for forests mixing main-axis and
/// local-PCA splits: a main-axis split credits its own dimension with the node norm,
/// while a PCA split spreads the node norm over the original features weighted by the
/// absolute entries of the split component's eigenvector. Also appends the index of the
/// least-important feature (candidate for exclusion) plus predResult to ExcudedIndex.txt.
/// </summary>
/// <param name="rfTreeArr">Forest: one node list per tree.</param>
/// <param name="analysisFolderName">Output directory.</param>
/// <param name="idOfWorse">Out: index of the feature with the smallest accumulated importance.</param>
/// <param name="predResult">Prediction result logged next to the excluded feature index.</param>
/// <param name="_rc">Config providing the original feature dimension.</param>
public static void printMainWithLocalPCAoriginalFeatureHist(List<GeoWave>[] rfTreeArr, string analysisFolderName, ref int idOfWorse, double predResult, recordConfig _rc)
{
    recordConfig rc = rfTreeArr.First().First().rc;

    double[] splitHist = new double[rc.dim];
    foreach (List<GeoWave> tree in rfTreeArr)
    {
        // Children reference parents by ID, so index the list in ID order.
        List<GeoWave> sortedById = tree.OrderBy(o => o.ID).ToList();
        foreach (GeoWave node in sortedById)
        {
            // Skip the root: it was not created by a split.
            if (node.parentID == -1)
            {
                continue;
            }

            if (sortedById[node.parentID].typeTransformed == DecicionTree.SplitType.MainAxes)
            {
                // Main-axis split: the whole node norm belongs to the split dimension.
                splitHist[node.dimIndex] += node.norm;
                continue;
            }

            // PCA split: dimIndex is the index of the parent's local-PCA component.
            int pcaInd = node.dimIndex;
            double[] splitEigenvector = sortedById[node.parentID].localPca.Components[pcaInd].Eigenvector;

            // Distribute the node norm over the original features by eigenvector magnitude.
            for (int i = 0; i < _rc.dim; i++)
            {
                splitHist[i] += Math.Abs(splitEigenvector[i]) * node.norm;
            }
        }
    }

    // Average the accumulated importance over the trees in the forest.
    splitHist = splitHist.Select(d => d / rc.rfNum).ToArray();

    // "using" guarantees each file is closed even if a write throws
    // (the original leaked both streams on exception).
    using (StreamWriter sw = new StreamWriter(analysisFolderName + "\\originalFeatureHist.txt", false))
    {
        foreach (double orgFeatureImportance in splitHist)
        {
            sw.Write(orgFeatureImportance + "\r\n");
        }
    }

    idOfWorse = Array.IndexOf(splitHist, splitHist.Min());
    using (StreamWriter excludedIndexWriter = new StreamWriter(analysisFolderName + "\\ExcudedIndex.txt", true))
    {
        excludedIndexWriter.WriteLine(idOfWorse + "\t" + predResult);
    }
}
/// <summary>
/// Logs per-node properties of a decision tree (norm, level, point count, cell volume,
/// split dimension, main-grid split index and value) either to S3 or to a local file.
/// Forests built with local-PCA splits (split_type == 5) are delegated to
/// printPcaWaveletsProperties, which uses a different log layout.
/// </summary>
/// <param name="decisionGeoWaveArr">Nodes of the tree to log.</param>
/// <param name="filename">Destination path (interpreted as an S3 key when Form1.UseS3).</param>
public static void printWaveletsProperties(List<GeoWave> decisionGeoWaveArr, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);

        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }

    // "using" on the already-created writer: guarantees it is closed on every path,
    // including the early return below and exceptions (the original leaked it there).
    using (sw)
    {
        recordConfig rc = decisionGeoWaveArr[0].rc;

        // Local-PCA forests use a dedicated log format.
        if (rc.split_type == 5)
        {
            printPcaWaveletsProperties(decisionGeoWaveArr, sw);
            return;
        }

        // Header now names all 7 values written per row (MaingridValue was missing).
        sw.WriteLine("norm, level, Npoints, volume, dimSplit, MainGridIndexSplit, MaingridValue");
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            // Volume of the node's cell: product of grid-interval widths over all dims.
            double volume = 1;
            for (int j = 0; j < t.boubdingBox[0].Count(); j++)
            {
                volume *= Form1.MainGrid[j][t.boubdingBox[1][j]] - Form1.MainGrid[j][t.boubdingBox[0][j]];
            }

            sw.WriteLine(t.norm + ", " + t.level + ", " + t.pointsIdArray.Count() + ", " + volume + ", " + t.dimIndex + ", " + t.Maingridindex + ", " + t.MaingridValue);
        }
    }
}
/// <summary>
/// Logs per-node properties of a decision tree as a tab-separated table:
/// norm, level, split type, point count, an intentionally empty column,
/// split dimension and the (rounded) first mean-value component.
/// </summary>
/// <param name="decisionGeoWaveArr">Nodes of the tree to log.</param>
/// <param name="filename">Destination text file (overwritten).</param>
public static void printWaveletsProperties(List<GeoWave> decisionGeoWaveArr, string filename)
{
    // "using" guarantees the writer is closed even if a write throws
    // (the original leaked the stream on exception).
    using (StreamWriter sw = new StreamWriter(filename, false))
    {
        // Header spelling and column layout are kept byte-for-byte: downstream
        // readers parse this exact line.
        sw.WriteLine("norm\t\tllevel\t\ttypeTransformed\t\tlNpoints\t\tldimSplit\t\tMeanValue");
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            // First component of the node's mean value, rounded for readability.
            // (Renamed from the misleading "nodeError".)
            double meanValue = Math.Round(t.MeanValue[0], 4);
            // The doubled "\t\t" + "\t\t" reproduces the original's empty column.
            sw.WriteLine(Math.Round(t.norm, 4) + "\t\t" + t.level + "\t\t" + t.typeTransformed + "\t\t" + t.pointsIdArray.Count() + "\t\t" + "\t\t" + t.dimIndex + "\t\t" + meanValue.ToString(CultureInfo.InvariantCulture));
        }
    }
}
/// <summary>
/// Randomly selects rc.NDimsinRF distinct dimensions (out of rc.dim) for one tree of the
/// random forest, via rejection sampling with a seeded RNG so the draw is reproducible.
/// </summary>
/// <param name="rc">Config providing the total dimension and the number of dims per tree.</param>
/// <param name="seed">Seed so each tree draws a different but reproducible subset.</param>
/// <returns>Boolean mask of length rc.dim with exactly the chosen dims set to true.</returns>
private bool[] getDim2Take(recordConfig rc, int seed)
{
    bool[] Dim2Take = new bool[rc.dim];
    var ran = new Random(seed);

    // Guard: requesting more distinct dims than exist would make the rejection
    // loop below spin forever (the original had no such guard).
    int dims2Pick = Math.Min(rc.NDimsinRF, rc.dim);

    for (int i = 0; i < dims2Pick; i++)
    {
        int index = ran.Next(0, rc.dim);
        if (Dim2Take[index])
        {
            i--; // already taken: redraw without advancing the count
        }
        else
        {
            Dim2Take[index] = true;
        }
    }

    return Dim2Take;
}
/// <summary>
/// Writes, for forests built with the local-PCA split strategy (split_type == 5),
/// a histogram counting how many nodes were split on each PCA component.
/// </summary>
/// <param name="rfTreeArr">Forest: one node list per tree; config is read from the first node of the first tree.</param>
/// <param name="analysisFolderName">Target directory for the histogram file.</param>
public static void printSplitByComponentHistogram(List<GeoWave>[] rfTreeArr, string analysisFolderName)
{
    // Config is shared by all nodes; take it from the first tree's first node.
    recordConfig rc = rfTreeArr.First().First().rc;

    // Relevant only for the local-PCA split strategy.
    if (rc.split_type != 5)
    {
        return;
    }

    // "using" guarantees the writer is closed even if a write throws
    // (the original leaked the stream on exception).
    using (StreamWriter sw = new StreamWriter(analysisFolderName + "\\localPCAsplitHistogram.txt", false))
    {
        // Count splits per dimension over every node of every tree
        // (dimIndex == -1 marks nodes that were not split).
        int[] splitHist = new int[rc.dim];
        foreach (GeoWave node in from tree in rfTreeArr from node in tree where node.dimIndex != -1 select node)
        {
            splitHist[node.dimIndex]++;
        }

        foreach (int splitDimCounter in splitHist)
        {
            sw.Write(splitDimCounter + " ");
        }
    }
}
/// <summary>
/// Returns a copy of the data with the categorical columns (rc.indOfCategorical) removed.
/// If the config declares no categorical variables, the original array is returned unchanged.
/// </summary>
/// <remarks>
/// Bug fix: the previous implementation removed columns one at a time (Aggregate), so each
/// removal shifted the indices of the remaining columns and any second categorical index
/// deleted the wrong column. All columns are now filtered in a single pass against the
/// original index set.
/// </remarks>
/// <param name="originalData">Rows of features; not modified.</param>
/// <param name="rc">Config flagging whether categorical columns exist and which indices they occupy.</param>
/// <returns>New jagged array without the categorical columns (or the input itself when none).</returns>
public static double[][] copyAndRemoveCategoricalColumns(double[][] originalData, recordConfig rc)
{
    if (!rc.hasCategorical)
    {
        return originalData; // no categorical variables to strip
    }

    // Set lookup keeps the per-element membership test O(1).
    var columnsToRemove = new HashSet<int>(rc.indOfCategorical);

    double[][] withRemovedColumns = new double[originalData.Length][];
    for (int i = 0; i < originalData.Length; i++)
    {
        withRemovedColumns[i] = originalData[i].Where((val, ind) => !columnsToRemove.Contains(ind)).ToArray();
    }

    return withRemovedColumns;
}
/// <summary>
/// Creates an empty GeoWave node: stores the run configuration and sizes the node's
/// internal structures for the given data and label dimensions via Init.
/// </summary>
/// <param name="dataDim">Number of data dimensions the node spans.</param>
/// <param name="labelDim">Dimension of the label vector stored per node.</param>
/// <param name="rc">Run configuration attached to the node.</param>
public GeoWave(int dataDim, int labelDim, recordConfig rc)
{
    // rc is assigned before Init — presumably Init reads the config; keep this order.
    this.rc = rc;
    Init(dataDim, labelDim);
}
/// <summary>
/// Builds a decision tree from pre-loaded training data and labels (no grid indices,
/// no dimension mask).
/// </summary>
public DecicionTree(recordConfig rc, double[][] trainingDt, double[][] trainingLabel)
{
    _rc = rc;
    _trainingDt = trainingDt;
    _trainingLabel = trainingLabel;
}
/// <summary>
/// Main "run script" handler: reads the data sets from disk, optionally applies global PCA,
/// builds the main grid, expands the method config into one recordConfig per run/fold, and
/// then runs the analyzer per config with backward feature elimination. Turns the button
/// green when everything finished.
/// </summary>
private void btnScript_Click(object sender, EventArgs e)
{
    set2Config();
    Refresh();
    u_config.printConfig(@"C:\Wavelets decomposition\config.txt");

    // Copy run switches from the UI checkboxes.
    UseS3 = UseS3CB.Checked;
    rumPrallel = rumPrallelCB.Checked;
    runBoosting = runBoostingCB.Checked;
    runProoning = runProoningCB.Checked;
    runBoostingProoning = runBoostingProoningCB.Checked;
    runRFProoning = runRFProoningCB.Checked;
    runRf = runRfCB.Checked;
    runBoostingLearningRate = runBoostingLearningRateCB.Checked;
    bucketName = bucketTB.Text;

    string results_path = @ResultsTB.Text;
    string db_path = @DBTB.Text + "\\";

    MainFolderName = results_path;
    Helpers.createMainDirectoryOrResultPath(results_path, bucketName);

    // READ DATA
    DB db = new DB();
    db.training_dt = db.getDataTable(db_path + "trainingData.txt");
    db.testing_dt = db.getDataTable(db_path + "testingData.txt");
    db.validation_dt = db.getDataTable(db_path + "ValidData.txt");
    db.training_label = db.getDataTable(db_path + "trainingLabel.txt");
    db.testing_label = db.getDataTable(db_path + "testingLabel.txt");
    db.validation_label = db.getDataTable(db_path + "ValidLabel.txt");
    upper_label = db.training_label.Max();
    lower_label = db.training_label.Min();

    // Trim every set to the requested training percentage.
    double trainingPercent = double.Parse(trainingPercentTB.Text);
    long rowToRemoveFrom = Convert.ToInt64(db.training_dt.Count() * trainingPercent);
    db.training_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.training_label = db.training_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_dt = db.testing_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_label = db.testing_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    // BUG FIX: the validation data was previously re-assigned from db.training_dt
    // (copy-paste error), so validation rows were training rows while the validation
    // labels stayed untouched. Trim the validation set from itself, like its label line.
    db.validation_dt = db.validation_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_label = db.validation_label.Where((el, i) => i < rowToRemoveFrom).ToArray();

    // REDUCE DIM, GLOBAL PCA
    if (usePCA.Checked)
    {
        DimReduction dimreduction = new DimReduction(db.training_dt);
        db.PCAtraining_dt = dimreduction.getGlobalPca(db.training_dt);
        db.PCAtesting_dt = dimreduction.getGlobalPca(db.testing_dt);
        db.PCAvalidation_dt = dimreduction.getGlobalPca(db.validation_dt);
    }
    else
    {
        // PCA de-activated (debug): pass the data through unchanged.
        db.PCAtraining_dt = db.training_dt;
        db.PCAtesting_dt = db.testing_dt;
        db.PCAvalidation_dt = db.validation_dt;
    }

    db.PCAtraining_GridIndex_dt = new long[db.PCAtraining_dt.Count()][];
    for (int i = 0; i < db.PCAtraining_dt.Count(); i++)
    {
        db.PCAtraining_GridIndex_dt[i] = new long[db.PCAtraining_dt[i].Count()];
    }

    // BOUNDING BOX AND MAIN GRID
    boundingBox = db.getboundingBox(db.PCAtraining_dt);
    MainGrid = db.getMainGrid(db.PCAtraining_dt, boundingBox, ref db.PCAtraining_GridIndex_dt);

    // READ CONFIG
    methodConfig mc = new methodConfig(true);
    int Nloops = int.Parse(NloopsTB.Text) - 1;
    int Kfolds = 0;
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        Nloops = Kfolds - 1;
    }
    for (int k = 0; k < Nloops; k++)
    {
        mc.boostlamda_0.Add(3.8); // one extra config variant per loop/fold
    }
    mc.generateRecordConfigArr();

    // Fill every generated record config from the UI fields.
    for (int k = 0; k < mc.recArr.Count(); k++)
    {
        mc.recArr[k].learningType = (recordConfig.LearnigType)comboLearningType.SelectedIndex; // regression, binary class, multi class
        if (mc.recArr[k].learningType == recordConfig.LearnigType.BinaryClassification)
        {
            mc.recArr[k].binaryMinClass = lower_label;
            mc.recArr[k].binaryMaxClass = upper_label;
            mc.recArr[k].midClassValue = 0.5 * (lower_label + upper_label);
        }
        mc.recArr[k].dim = NfeaturesTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesTB.Text, k));
        mc.recArr[k].approxThresh = double.Parse(evaluateString(approxThreshTB.Text, k));
        mc.recArr[k].partitionErrType = int.Parse(evaluateString(partitionTypeTB.Text, k));
        mc.recArr[k].minWaveSize = int.Parse(evaluateString(minNodeSizeTB.Text, k));
        mc.recArr[k].hopping_size = int.Parse(evaluateString(waveletsSkipEstimationTB.Text, k));
        mc.recArr[k].test_error_size = double.Parse(evaluateString(waveletsPercentEstimationTB.Text, k)); // percent of waves to check
        mc.recArr[k].NskipsinKfunc = double.Parse(evaluateString(boostingKfuncPercentTB.Text, k));
        mc.recArr[k].rfBaggingPercent = double.Parse(evaluateString(bagginPercentTB.Text, k));
        mc.recArr[k].rfNum = int.Parse(evaluateString(NrfTB.Text, k));
        mc.recArr[k].boostNum = int.Parse(evaluateString(NboostTB.Text, k));
        mc.recArr[k].boostProoning_0 = int.Parse(evaluateString(NfirstPruninginBoostingTB.Text, k));
        mc.recArr[k].boostlamda_0 = double.Parse(evaluateString(boostingLamda0TB.Text, k));
        mc.recArr[k].NwaveletsBoosting = int.Parse(evaluateString(NfirstwaveletsBoostingTB.Text, k));
        mc.recArr[k].boostNumLearningRate = int.Parse(evaluateString(NboostingLearningRateTB.Text, k));
        mc.recArr[k].percent_training_db = trainingPercent;
        mc.recArr[k].BoundLevel = int.Parse(evaluateString(boundLevelTB.Text, k));
        mc.recArr[k].NDimsinRF = NfeaturesrfTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesrfTB.Text, k));
        mc.recArr[k].split_type = int.Parse(evaluateString(splitTypeTB.Text, k));
        mc.recArr[k].NormLPType = int.Parse(evaluateString(errTypeEstimationTB.Text, k));
        mc.recArr[k].RFpruningTestRange[1] = int.Parse(evaluateString(RFpruningEstimationRange1TB.Text, k));
        mc.recArr[k].boundDepthTree = int.Parse(evaluateString(boundDepthTB.Text, k));
        mc.recArr[k].CrossValidFold = k;
        mc.recArr[k].labelDim = db.training_label[0].Count(); // save label dim in config
    }

    Helpers.createOutputDirectories(mc.recArr, u_config, bucketName, results_path);

    // SET ID ARRAY LIST
    List<int> trainingID = Enumerable.Range(0, db.PCAtraining_dt.Count()).ToList();
    List<int> testingID = Enumerable.Range(0, db.PCAtesting_dt.Count()).ToList();

    // Cross validation: shuffle the training ids with a fixed seed (reproducible),
    // then split into folds. The largest group is training.
    List<List<int>> trainingFoldId = new List<List<int>>();
    List<List<int>> testingFoldId = new List<List<int>>();
    Random ran = new Random(2);
    List<int> training_rand = trainingID.OrderBy(x => ran.Next()).ToList().GetRange(0, trainingID.Count);
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        createCrossValid(Kfolds, training_rand, trainingFoldId, testingFoldId);
    }

    // Bounding intervals: lower index 0 in every dim, upper index = last grid index.
    int[][] BB = new int[2][];
    BB[0] = new int[boundingBox[0].Count()];
    BB[1] = new int[boundingBox[0].Count()];
    for (int i = 0; i < boundingBox[0].Count(); i++)
    {
        BB[1][i] = MainGrid[i].Count() - 1; // set last index in each dim
    }

    // Loop over configs/folds.
    // NOTE(review): starts at i = 3 — looks like a leftover debug setting; confirm intent.
    for (int i = 3; i < mc.recArr.Count; i++)
    {
        recordConfig rc = mc.recArr[i];
        Analizer analizer = new Analizer(MainFolderName + "\\" + rc.getShortName(), MainGrid, db, rc);

        // Backward feature elimination: analyze, record the error, drop the worst feature.
        double[] numFeachuresVSerror = new double[rc.dim];
        int indFeachureToExclude = -1;
        double predResult = 0;
        for (int j = rc.dim - 1; j >= 0; j--)
        {
            if (!croosValidCB.Checked)
            {
                analizer.analize(trainingID, testingID, BB, ref indFeachureToExclude, ref predResult);
            }
            else
            {
                analizer.analize(trainingFoldId[i], testingFoldId[i], BB, ref indFeachureToExclude, ref predResult); // cross validation
            }
            numFeachuresVSerror[j] = predResult; // index j => j+1 features were used
            analizer.excludeFeatureFromDb(indFeachureToExclude);
        }

        PrintEngine.printBestErrorByNumberOfFeatchures(analizer._analysisFolderName, numFeachuresVSerror);
    }

    btnScript.BackColor = Color.Green;
}
/// <summary>
/// Generates one recordConfig per element of the Cartesian product of all 16 parameter
/// lists, appending each combination to recArr. The resulting count is the product of
/// every list's length, so it grows multiplicatively with each extra list value.
/// </summary>
public void generateRecordConfigArr()
{
    // 16 nested loops = Cartesian product; the loop order fixes the order of recArr,
    // which callers may rely on (e.g. indexing configs by fold), so keep it as-is.
    for (int i0 = 0; i0 < dim.Count; i0++)
    {
        for (int i1 = 0; i1 < approxThresh.Count; i1++)
        {
            for (int i2 = 0; i2 < partitionErrType.Count; i2++)
            {
                for (int i3 = 0; i3 < approxOrder.Count; i3++)
                {
                    for (int i4 = 0; i4 < boostNum.Count; i4++)
                    {
                        for (int i5 = 0; i5 < boostlamda_0.Count; i5++)
                        {
                            for (int i6 = 0; i6 < boostNormTarget.Count; i6++)
                            {
                                for (int i7 = 0; i7 < boostNormsecond.Count; i7++)
                                {
                                    for (int i8 = 0; i8 < boostTau.Count; i8++)
                                    {
                                        for (int i9 = 0; i9 < rfNum.Count; i9++)
                                        {
                                            for (int i10 = 0; i10 < rfBaggingPercent.Count; i10++)
                                            {
                                                for (int i11 = 0; i11 < NskipsinKfunc.Count; i11++)
                                                {
                                                    for (int i12 = 0; i12 < minWaveSize.Count; i12++)
                                                    {
                                                        for (int i13 = 0; i13 < hopping_size.Count; i13++)
                                                        {
                                                            for (int i14 = 0; i14 < test_error_size.Count; i14++)
                                                            {
                                                                for (int i15 = 0; i15 < boostProoning_0.Count; i15++)
                                                                {
                                                                    // Positional args follow the recordConfig constructor
                                                                    // order; the trailing 0 is a fixed final argument.
                                                                    recordConfig rc = new recordConfig(dim[i0], approxThresh[i1], partitionErrType[i2], approxOrder[i3], boostNum[i4], boostlamda_0[i5], boostNormTarget[i6], boostNormsecond[i7], boostTau[i8], rfNum[i9], rfBaggingPercent[i10], NskipsinKfunc[i11], minWaveSize[i12], hopping_size[i13], test_error_size[i14], boostProoning_0[i15], 0);
                                                                    recArr.Add(rc);
                                                                }
                                                            }
                                                        }
                                                    }
                                                }
                                            }
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Reads a piecewise-constant wavelet (GeoWave) tree back from a text file written by the
/// matching print routine: a key/value header section up to a "StartReading" marker,
/// then one line per node (id, children, bounding box, level, mean values, norm, parent).
/// </summary>
/// <param name="filename">Path of the wavelets file.</param>
/// <param name="rc">Config attached to every reconstructed node.</param>
/// <returns>The reconstructed node list, or null if the file is missing.</returns>
public static List<GeoWave> getConstWaveletsFromFile(string filename, recordConfig rc)
{
    if (!Form1.UseS3 && !File.Exists(filename)) //this func was not debugged after modification
    {
        MessageBox.Show(@"the file " + Path.GetFileName(filename) + @" doesnt exist in " + Path.GetFullPath(filename));
        return (null);
    }
    StreamReader sr;
    sr = new StreamReader(File.OpenRead(filename));
    string[] values = { "" };
    string line;
    // Header fields. Some are parsed but never used below (DimensionReductionMatrix,
    // numOfWavlets, approxOrder) — presumably kept for file-format compatibility.
    string DimensionReductionMatrix = "";
    int numOfWavlets = -1;
    int dimension = -1;
    int labelDimension = -1;
    // NOTE(review): declared double but parsed with int.Parse below — confirm the file
    // always stores an integer here.
    double approxOrder = -1;
    // Parse header lines until the "StartReading" marker (or EOF).
    while (!sr.EndOfStream && values[0] != "StartReading")
    {
        line = sr.ReadLine();
        values = line.Split(Form1.seperator, StringSplitOptions.RemoveEmptyEntries);
        if (values[0] == "DimensionReductionMatrix")
        {
            DimensionReductionMatrix = values[1];
        }
        else if (values[0] == "numOfWavlets")
        {
            numOfWavlets = int.Parse(values[1]);
        }
        else if (values[0] == "approxOrder")
        {
            approxOrder = int.Parse(values[1]);
        }
        else if (values[0] == "dimension")
        {
            dimension = int.Parse(values[1]);
        }
        else if (values[0] == "labelDimension")
        {
            labelDimension = int.Parse(values[1]);
        }
        else if (values[0] == "StartReading")
        {
            ; // marker line: header finished, node records follow
        }
        else
        {
            // Unrecognized header key: warn but keep reading.
            MessageBox.Show(@"the file " + Path.GetFileName(filename) + @" already exist in " + Path.GetFullPath(filename) + @" might have bad input !");
        }
    }
    //read values
    List<GeoWave> gwArr = new List<GeoWave>();
    while (!sr.EndOfStream)
    {
        GeoWave gw = new GeoWave(dimension, labelDimension, rc);
        line = sr.ReadLine();
        // NOTE(review): if ReadLine returns null, "values" keeps the previous record's
        // fields and a duplicate node is parsed from stale data — confirm files never
        // end mid-record.
        if (line != null)
        {
            values = line.Split(Form1.seperator, StringSplitOptions.RemoveEmptyEntries);
        }
        gw.ID = int.Parse(values[0]);
        gw.child0 = int.Parse(values[1]);
        gw.child1 = int.Parse(values[2]);
        int counter = 0;
        for (int j = 0; j < dimension; j++)
        {
            gw.boubdingBox[0][j] = int.Parse(values[3 + 4 * j]); //the next are the actual values and not the indeces int the maingrid - so we skip 4 elementsat a time
            gw.boubdingBox[1][j] = int.Parse(values[4 + 4 * j]);
            // NOTE(review): hard-coded "4 + 2 * 4" appears to assume dimension == 2; for
            // any other dimension the fields read below would be misaligned — verify
            // against the writer's layout.
            counter = 4 + 2 * 4;
        }
        gw.level = int.Parse(values[counter + 1]);
        counter = counter + 2;
        // NOTE(review): the index "counter + j" advances together with counter++, so for
        // labelDimension > 1 every other field is skipped — consistent only when
        // labelDimension == 1; confirm against the writer.
        for (int j = 0; j < labelDimension; j++)
        {
            gw.MeanValue[j] = double.Parse(values[counter + j]);
            counter++;
        }
        gw.norm = double.Parse(values[counter]);
        gw.parentID = int.Parse(values[counter + 1]);
        gwArr.Add(gw);
    }
    sr.Close();
    return (gwArr);
}