/// <summary>
/// Writes a jagged integer table to <paramref name="filename"/>, one row per line,
/// each value followed by a single space (historical format, preserved exactly).
/// Output goes to S3 when Form1.UseS3 is set, otherwise to the local file system.
/// </summary>
public static void printtable(List <int>[] table, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // Fix: using guarantees the stream is flushed and closed even if a write
    // throws (the original leaked the handle on exceptions).
    using (sw)
    {
        for (int i = 0; i < table.Length; i++)
        {
            // StringBuilder avoids O(n^2) string concatenation on wide rows;
            // output is byte-identical to the old "value + space" format.
            var line = new System.Text.StringBuilder();
            for (int j = 0; j < table[i].Count; j++)
            {
                line.Append(table[i][j]).Append(' ');
            }
            sw.WriteLine(line.ToString());
        }
    }
}
/// <summary>
/// Writes one "level, norm" line per wavelet to <paramref name="filename"/>.
/// Output goes to S3 when Form1.UseS3 is set, otherwise to the local file system.
/// </summary>
public static void printLevelWaveletNorm(List <GeoWave> decisionGeoWaveArr, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // Fix: using closes the writer even when a write throws (was leaked before).
    using (sw)
    {
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            sw.WriteLine(t.level + ", " + t.norm);
        }
    }
}
//************************************** PRINT FUNCTIONS WITH OUT REFERENCES ****************************************************
/// <summary>
/// Writes a small human-readable error summary (l2/l1 error, miss count, test
/// count, success rate) to <paramref name="filename"/>, on S3 or locally
/// depending on Form1.UseS3. Output text is kept byte-identical to the
/// historical format so downstream log parsing is unaffected.
/// </summary>
public static void printErrorsToFile(string filename, double l2, double l1, double l0, double testSize)
{
    StreamWriter writer;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        writer = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        writer = new StreamWriter(filename, false);
    }
    // Fix: using closes the writer on every path (was leaked on exceptions).
    using (writer)
    {
        //WRITE
        writer.WriteLine("l2 estimation error: " + l2.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("l1 estimation error: " + l1.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("num of miss labels: " + l0.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("num of tests: " + testSize.ToString(CultureInfo.InvariantCulture));
        writer.WriteLine("sucess rate : " + (1 - (l0 / testSize)).ToString(CultureInfo.InvariantCulture));
    }
}
/// <summary>
/// Writes each element of <paramref name="lst"/> on its own line to
/// <paramref name="filename"/> (S3 when Form1.UseS3 is set, local otherwise).
/// </summary>
public static void printList(List <double> lst, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }
    // Fix: using closes the writer even when a write throws (was leaked before).
    using (sw)
    {
        for (int i = 0; i < lst.Count; i++)
        {
            sw.WriteLine(lst[i]);
        }
    }
}
/// <summary>
/// Writes <paramref name="fileContent"/> to a storage file named after
/// <paramref name="dateFileName"/> ("yyyy MMMM dd" plus the extension for
/// <paramref name="fileType"/>). Returns false on any failure (best-effort);
/// on success advances _latestDate to the following day and returns true.
/// </summary>
public async Task <bool> WriteFileToStorageAsync(string fileContent, DateTime dateFileName, FileType fileType)
{
    try
    {
        // Build the target name, e.g. "2020 January 01.txt".
        string targetName = string.Format("{0}.{1}", dateFileName.ToString("yyyy MMMM dd"), GetFileTypeExtension(fileType));
        S3FileInfo newFile = _subDirectory.GetFile(targetName);

        // Write the file
        using (StreamWriter writer = new StreamWriter(newFile.OpenWrite()))
        {
            await writer.WriteAsync(fileContent);
        }
    }
    catch
    {
        // Deliberate best-effort: any failure is reported as a simple false.
        return false;
    }

    _latestDate = dateFileName.AddDays(1);
    return true;
}
/// <summary>
/// Reads and returns the full contents of "<paramref name="fileName"/>.&lt;ext&gt;"
/// from the main storage directory; returns String.Empty on any failure.
/// </summary>
public async Task <string> GetFileContents(string fileName, FileType fileType)
{
    // Get the fully qualified file name
    var fullFileName = string.Format("{0}.{1}", fileName, GetFileTypeExtension(fileType));
    var serverFile = _mainDirectory.GetFile(fullFileName);

    try
    {
        using (var reader = new StreamReader(serverFile.OpenRead()))
        {
            var contents = await reader.ReadToEndAsync();
            return contents;
        }
    }
    catch
    {
        // Best-effort read: missing/unreadable files yield an empty string.
        return String.Empty;
    }
}
/// <summary>
/// Zips the contents of the "norm-ziptest" bucket into a local archive and
/// uploads the archive back to the bucket as data.zip.
/// </summary>
static void Main(string[] args)
{
    var zipFilename = @"c:\temp\data.zip";
    var client = new AmazonS3Client();
    S3DirectoryInfo rootDir = new S3DirectoryInfo(client, "norm-ziptest");

    using (var zip = new ZipFile())
    {
        zip.Name = zipFilename;
        addFiles(zip, rootDir, "");
        // Fix: DotNetZip's Dispose does not persist the archive — without an
        // explicit Save() the local zip is never written, and MoveFromLocal
        // below would fail (Save() writes to the path assigned to Name).
        zip.Save();
    }

    // Move local zip file to S3
    var fileInfo = rootDir.GetFile("data.zip");
    fileInfo.MoveFromLocal(zipFilename);
}
/// <summary>
/// Writes a per-wavelet properties log (norm, level, point count, box volume,
/// split dimension, grid index, grid value) to <paramref name="filename"/>.
/// Local-PCA runs (split_type == 5) are delegated to a dedicated printer.
/// </summary>
public static void printWaveletsProperties(List <GeoWave> decisionGeoWaveArr, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }

    recordConfig rc = decisionGeoWaveArr[0].rc;
    //different log for local pca split — the callee takes ownership of sw
    if (rc.split_type == 5)
    {
        printPcaWaveletsProperties(decisionGeoWaveArr, sw);
        return;
    }

    // Fix: using closes the writer on every path (was leaked on exceptions).
    using (sw)
    {
        // Fix: the old header listed 6 column names while 7 values are written
        // per row; MainGridValueSplit was missing.
        sw.WriteLine("norm, level, Npoints, volume, dimSplit, MainGridIndexSplit, MainGridValueSplit");
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            // Volume of the wavelet's bounding box in grid coordinates.
            double volume = 1;
            for (int j = 0; j < t.boubdingBox[0].Count(); j++)
            {
                volume *= (Form1.MainGrid[j][t.boubdingBox[1][j]] - Form1.MainGrid[j][t.boubdingBox[0][j]]);
            }
            sw.WriteLine(t.norm + ", " + t.level + ", " + t.pointsIdArray.Count() + ", " + volume + ", " + t.dimIndex + ", " + t.Maingridindex + ", " + t.MaingridValue);
        }
    }
}
/// <summary>
/// Serializes the wavelet forest to <paramref name="filename"/>, one wavelet
/// per line: ID; child0; child1; {bbIdx0; bbIdx1; gridVal0; gridVal1} per
/// dimension; level; one mean per label dimension; norm; parentID.
/// Grid values and means use InvariantCulture (format preserved exactly;
/// getConstWaveletsFromFile reads this layout back).
/// </summary>
public static void printConstWavelets2File(List <GeoWave> decisionGeoWaveArr, string filename)
{
    StreamWriter sw;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sw = new StreamWriter(artFile.OpenWrite());
    }
    else
    {
        sw = new StreamWriter(filename, false);
    }

    int dataDim = decisionGeoWaveArr[0].rc.dim;
    int labelDim = decisionGeoWaveArr[0].MeanValue.Count();

    // Fix: using closes the writer on every path (was leaked on exceptions).
    using (sw)
    {
        //save metadata
        foreach (GeoWave t in decisionGeoWaveArr)
        {
            // StringBuilder avoids O(n^2) concatenation on high-dimensional rows.
            var line = new System.Text.StringBuilder();
            line.Append(t.ID).Append("; ").Append(t.child0).Append("; ").Append(t.child1).Append("; ");
            for (int j = 0; j < dataDim; j++)
            {
                line.Append(t.boubdingBox[0][j]).Append("; ")
                    .Append(t.boubdingBox[1][j]).Append("; ")
                    .Append(Form1.MainGrid[j][t.boubdingBox[0][j]].ToString(CultureInfo.InvariantCulture)).Append("; ")
                    .Append(Form1.MainGrid[j][t.boubdingBox[1][j]].ToString(CultureInfo.InvariantCulture)).Append("; ");
            }
            line.Append(t.level).Append("; ");
            for (int j = 0; j < labelDim; j++)
            {
                line.Append(t.MeanValue[j].ToString(CultureInfo.InvariantCulture)).Append("; ");
            }
            line.Append(t.norm).Append("; ").Append(t.parentID);
            sw.WriteLine(line.ToString());
        }
    }
}
/// <summary>
/// Downloads the package's zip from S3 into the extraction folder with
/// progress reporting, then rewrites the package's path/display name to the
/// local copy. The package is tracked in _currentlyDownloadedPackages for the
/// duration of the transfer.
/// </summary>
public async Task <AnnotationPackage> DownloadPackageAsync(AnnotationPackage package)
{
    if (!Directory.Exists(this._extractionFolder))
    {
        Directory.CreateDirectory(this._extractionFolder);
    }

    var dir = new S3DirectoryInfo(this._client, this._bucketName);
    var file = dir.GetFile(package.PackagePath);
    var zipFilePath = Path.Combine(this._extractionFolder, file.Name);

    package.Downloading = true;

    // NOTE(review): the object key is the bare file name, not
    // package.PackagePath — if packages can live in sub-folders the key would
    // be wrong; confirm against how packages are uploaded.
    var request = new GetObjectRequest
    {
        BucketName = this._bucketName,
        Key = file.Name
    };

    using (var response = this._client.GetObject(request))
    {
        this._currentlyDownloadedPackages.Add(package);
        response.WriteObjectProgressEvent += this.WriteObjectProgressEvent;
        try
        {
            await response.WriteResponseStreamToFileAsync(zipFilePath, false, new System.Threading.CancellationToken());
        }
        finally
        {
            // Fix: always detach the progress handler and clear the in-flight
            // entry — the original leaked both when the download threw.
            this._currentlyDownloadedPackages.Remove(package);
            response.WriteObjectProgressEvent -= this.WriteObjectProgressEvent;
        }
    }

    package.Downloading = false;
    package.PackagePath = zipFilePath;
    package.DisplayName = Path.GetFileNameWithoutExtension(zipFilePath);

    // Fix: redundant "await Task.FromResult(...)" removed — already async.
    return package;
}
/// <summary>
/// Reads the boosting norm-threshold list from the first line of
/// <paramref name="filename"/> (S3 or local per Form1.UseS3). Returns null and
/// shows a message box when the local file does not exist; returns an empty
/// list when the file is empty.
/// </summary>
public static List <double> getBoostingNormThresholdList(string filename)
{
    if (!Form1.UseS3 && !File.Exists(filename))
    {
        MessageBox.Show(@"the file " + Path.GetFileName(filename) + @" doesnt exist in " + Path.GetFullPath(filename));
        return null;
    }

    StreamReader sr;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sr = artFile.OpenText();
    }
    else
    {
        sr = new StreamReader(File.OpenRead(filename));
    }

    // Fix: the original only called sr.Close() when the first line was
    // non-null, leaking the stream for empty files; using closes it always.
    using (sr)
    {
        List <double> NormArry = new List <double>();
        string line = sr.ReadLine();
        if (line != null)
        {
            string[] values = line.Split(Form1.seperator, StringSplitOptions.RemoveEmptyEntries);
            for (int i = 0; i < values.Length; i++)
            {
                NormArry.Add(double.Parse(values[i]));
            }
        }
        return NormArry;
    }
}
// Demonstrates the S3 file-system abstraction: creates a bucket, writes a few
// files and sub-directories, dumps the resulting tree and file contents to the
// console, then deletes everything including the bucket.
static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
        {
            // Bucket root.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();

            // README at the bucket root.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter readmeWriter = new StreamWriter(readme.OpenWrite()))
            {
                readmeWriter.WriteLine("This is my readme file.");
            }

            DirectoryInfo localRoot = new DirectoryInfo(@"C:\");
            DirectoryInfo localCode = localRoot.CreateSubdirectory("code");

            // "code" directory holding a small C# source file.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("code");
            S3FileInfo codeFile = codeDir.GetFile("Program.cs");
            string[] programSource =
            {
                "namespace S3FileSystem_Sample",
                "{",
                " class Program",
                " {",
                " static void Main(string[] args)",
                " {",
                " Console.WriteLine(\"Hello World\");",
                " }",
                " }",
                "}"
            };
            using (StreamWriter codeWriter = new StreamWriter(codeFile.OpenWrite()))
            {
                foreach (string sourceLine in programSource)
                {
                    codeWriter.WriteLine(sourceLine);
                }
            }

            // "licenses" directory with a single license file.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter licenseWriter = new StreamWriter(licenseFile.OpenWrite()))
            {
                licenseWriter.WriteLine("A license to code");
            }

            // Dump the directory tree.
            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");

            // Dump the contents of every file under "code".
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }

            // Remove all files, then the bucket itself.
            rootDirectory.Delete(true);
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
/// <summary>
/// Reads a separator-delimited numeric table from <paramref name="filename"/>
/// into a jagged array. With Form1.UseS3 the file is streamed from S3 (a
/// counting pass with retry, then a parsing pass over the rewound stream);
/// locally, File.ReadAllLines supplies the line count. A missing "Valid" file
/// falls back to the matching "testing" file. Unparseable cells become -1.
/// NOTE(review): the S3 retry loop never gives up on a permanent error.
/// </summary>
public double[][] getDataTableTMP(string filename)
{
    StreamReader reader;
    long lineCount = 0;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        if (artFile.Exists == false && file_name.Contains("Valid"))
        {
            string tmp = file_name.Replace("Valid", "testing");
            artFile = s3dir.GetFile(tmp);
        }
        bool faileEread = true;
        reader = null;
        while (faileEread)
        {
            try
            {
                // Fix: reset the count so a retried pass does not double-count.
                lineCount = 0;
                reader = artFile.OpenText();
                while (!reader.EndOfStream)
                {
                    reader.ReadLine();
                    lineCount++;
                }
                // Rewind for the parsing pass below.
                reader.DiscardBufferedData();
                reader.BaseStream.Seek(0, SeekOrigin.Begin);
                reader.BaseStream.Position = 0;
                faileEread = false;
            }
            catch
            {
                // Fix: release the half-read stream before retrying (was leaked).
                if (reader != null)
                {
                    reader.Close();
                    reader = null;
                }
                faileEread = true;
            }
        }
    }
    else
    {
        if (!File.Exists(filename))//IF NO VALID EXISTS - TRY WITH TEST
        {
            filename = filename.Replace("Valid", "testing");
        }
        reader = new StreamReader(File.OpenRead(filename));
        lineCount = File.ReadAllLines(filename).Length;
    }

    //GET THE FIRST LINE
    string line = reader.ReadLine();
    string[] values = line.Split(seperator, StringSplitOptions.RemoveEmptyEntries);

    //IF NO VALUES ALERT
    if (values.Length < 1)
    {
        // Fix: close the reader on the early-exit path (was leaked).
        reader.Close();
        return null;
    }

    double[][] dt = new double[lineCount][];
    dt[0] = new double[values.Length];
    for (int j = 0; j < values.Length; j++)
    {
        dt[0][j] = double.Parse(values[j]);
    }

    //SET VALUES TO TABLE
    int counter = 1;
    while (!reader.EndOfStream)
    {
        line = reader.ReadLine();
        values = line.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
        dt[counter] = new double[values.Length];
        for (int j = 0; j < values.Length; j++)
        {
            double tmp;
            // Unparseable cells are recorded as -1 (historical sentinel).
            dt[counter][j] = double.TryParse(values[j], out tmp) ? tmp : -1;
        }
        counter++;
    }
    reader.Close();
    return dt;
}
// Reads a separator-delimited numeric table from <filename> into a jagged array
// while recording which cells were NOT parseable in naTable (key = (row, col)).
// Unparseable cells are substituted with a per-column stand-in value (artVal)
// taken from the first line in which that column parsed successfully.
// S3 is used when Form1.UseS3 is set; a missing "Valid" file falls back to the
// matching "testing" file.
public double[][] getDataTableWithNan(string filename, ref Dictionary <Tuple <int, int>, bool> naTable)
{
    StreamReader reader;
    long lineCount = 0;
    List <double> artVal = new List <double>();   // per-column substitute values
    List <int> emptyVal = new List <int>();       // columns still lacking a substitute
    string tmpline = "";

    #region first loop
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        if (artFile.Exists == false && file_name.Contains("Valid"))
        {
            string tmp = file_name.Replace("Valid", "testing");
            artFile = s3dir.GetFile(tmp);
        }
        // Counting/substitute-building pass, retried until a full read succeeds.
        // NOTE(review): lineCount is not reset on retry, so a failed partial
        // pass double-counts lines; also this loops forever on a permanent
        // error — confirm both are acceptable.
        bool faileEread = true;
        reader = null;
        while (faileEread)
        {
            try
            {
                reader = artFile.OpenText();
                while (!reader.EndOfStream)
                {
                    tmpline = reader.ReadLine();
                    lineCount++;
                    //***********************
                    //handle missing values
                    if (lineCount == 1)//first read
                    {
                        // Seed artVal from the first line; columns whose first
                        // cell is unparseable get 0 and are queued in emptyVal.
                        string[] tmpvalues = tmpline.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
                        for (int i = 0; i < tmpvalues.Count(); i++)
                        {
                            double tmpDouble;
                            if (double.TryParse(tmpvalues[i], out tmpDouble))
                            {
                                artVal.Add(tmpDouble);
                            }
                            else
                            {
                                artVal.Add(0);
                                emptyVal.Add(i);
                            }
                        }
                    }
                    if (emptyVal.Count > 0) // not all artVal values are set - at least once...
                    {
                        // Try to fill still-missing columns from the current line.
                        // NOTE(review): RemoveAt(i) without an i-- skips the next
                        // pending column on this line; it can be recovered on a
                        // later line, but a value present only here may be missed.
                        string[] tmpvalues = tmpline.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
                        for (int i = 0; i < emptyVal.Count(); i++)
                        {
                            double tmpDouble;
                            if (double.TryParse(tmpvalues[emptyVal[i]], out tmpDouble))
                            {
                                artVal[emptyVal[i]] = tmpDouble;
                                emptyVal.RemoveAt(i);
                            }
                        }
                    }
                    //***********************
                }
                // Rewind so the parsing pass below re-reads from the start.
                reader.DiscardBufferedData();
                reader.BaseStream.Seek(0, SeekOrigin.Begin);
                reader.BaseStream.Position = 0;
                faileEread = false;
            }
            catch
            {
                faileEread = true;
            }
        }
    }
    else
    {
        if (!File.Exists(filename))//IF NO VALID EXISTS - TRY WITH TEST
        {
            filename = filename.Replace("Valid", "testing");
        }
        reader = new StreamReader(File.OpenRead(filename));
        lineCount = File.ReadAllLines(filename).Length;
        // Separate scan with its own reader to build artVal; stops early once
        // every column has a substitute value.
        StreamReader tmpreader = new StreamReader(File.OpenRead(filename));
        bool stoploop = false;
        bool firstLine = true;
        while (!tmpreader.EndOfStream && !stoploop)
        {
            tmpline = tmpreader.ReadLine();
            if (firstLine)//first read
            {
                firstLine = false;
                string[] tmpvalues = tmpline.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
                for (int i = 0; i < tmpvalues.Count(); i++)
                {
                    double tmpDouble;
                    if (double.TryParse(tmpvalues[i], out tmpDouble))
                    {
                        artVal.Add(tmpDouble);
                    }
                    else
                    {
                        artVal.Add(0);
                        emptyVal.Add(i);
                    }
                }
            }
            if (emptyVal.Count > 0) // not all artVal values are set - at least once...
            {
                // Same fill-in logic as the S3 branch (same RemoveAt caveat).
                string[] tmpvalues = tmpline.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
                for (int i = 0; i < emptyVal.Count(); i++)
                {
                    double tmpDouble;
                    if (double.TryParse(tmpvalues[emptyVal[i]], out tmpDouble))
                    {
                        artVal[emptyVal[i]] = tmpDouble;
                        emptyVal.RemoveAt(i);
                    }
                }
            }
            //***********************
            if (emptyVal.Count < 1)
            {
                stoploop = true;
            }
        }
        tmpreader.Close();
    }
    #endregion

    // A column that never parsed anywhere cannot be substituted.
    if (emptyVal.Count > 0)
    {
        MessageBox.Show("there is a column with only Nan values - stop and repair");
    }

    //GET THE FIRST LINE
    string line = reader.ReadLine();
    string[] values = line.Split(seperator, StringSplitOptions.RemoveEmptyEntries);

    //IF NO VALUES ALERT
    if (values.Count() < 1)
    {
        return(null);
    }

    double[][] dt = new double[lineCount][];
    dt[0] = new double[values.Count()];
    for (int j = 0; j < values.Count(); j++)
    {
        double tmpDouble = 0;
        if (double.TryParse(values[j], out tmpDouble))
        {
            dt[0][j] = double.Parse(values[j]);
        }
        else
        {
            // Unparseable cell: substitute the column stand-in and mark it.
            dt[0][j] = artVal[j];
            naTable.Add(new Tuple <int, int>(0, j), true);
        }
    }

    //SET VALUES TO TABLE
    int counter = 1;
    while (!reader.EndOfStream)
    {
        line = reader.ReadLine();
        values = line.Split(seperator, StringSplitOptions.RemoveEmptyEntries);
        dt[counter] = new double[values.Count()];
        for (int j = 0; j < values.Count(); j++)
        {
            double tmpDouble = 0;
            if (double.TryParse(values[j], out tmpDouble))
            {
                dt[counter][j] = double.Parse(values[j]);
            }
            else
            {
                dt[counter][j] = artVal[j];
                naTable.Add(new Tuple <int, int>(counter, j), true);
            }
        }
        counter++;
    }
    reader.Close();
    return(dt);
}
// Sample: exercises the S3 file-system API — creates a bucket, writes a wiki
// article and a license file, prints the directory tree and file contents,
// and (when deleteAtEnd is set) removes everything including the bucket.
private static void Main(string[] args)
{
    if (checkRequiredFields())
    {
        // Client with bounded retries for throttled requests.
        using (client = new AmazonS3Client(new AmazonS3Config() { MaxErrorRetry = 2, ThrottleRetries = true }))
        {
            // Creates the bucket.
            S3DirectoryInfo rootDirectory = new S3DirectoryInfo(client, bucketName);
            rootDirectory.Create();

            // Creates a file at the root of the bucket.
            S3FileInfo readme = rootDirectory.GetFile("README.txt");
            using (StreamWriter writer = new StreamWriter(readme.OpenWrite()))
                writer.WriteLine("This is my readme file.");

            // Create a directory called wiki and write an article to it.
            S3DirectoryInfo codeDir = rootDirectory.CreateSubdirectory("wiki");
            S3FileInfo codeFile = codeDir.GetFile("Phantasmagoria.txt");
            using (StreamWriter writer = new StreamWriter(codeFile.OpenWrite()))
            {
                writer.WriteLine("Phantasmagoria (About this sound American pronunciation (help·info), also fantasmagorie, fantasmagoria) was a form of horror theatre that ");
                writer.WriteLine("(among other techniques) used one or more magic lanterns to project frightening images such as skeletons, demons, and ");
                writer.WriteLine("ghosts onto walls, smoke, or semi-transparent screens, typically using rear projection to keep the lantern out of sight. Mobile or ");
                writer.WriteLine("portable projectors were used, allowing the projected image to move and change size on the screen, and multiple projecting ");
                writer.WriteLine("devices allowed for quick switching of different images. In many shows the use of spooky decoration, total darkness, sound ");
                writer.WriteLine("effects, (auto-)suggestive verbal presentation and sound effects were also key elements. Some shows added all kinds of ");
                writer.WriteLine("sensory stimulation, including smells and electric shocks. Even required fasting, fatigue (late shows) and drugs have been ");
                writer.WriteLine("mentioned as methods of making sure spectators would be more convinced of what they saw. The shows started under the ");
                writer.WriteLine("guise of actual séances in Germany in the late 18th century, and gained popularity through most of Europe (including Britain) ");
                writer.WriteLine("throughout the 19th century.");
            }

            // Create a directory called license and write a file to it.
            S3DirectoryInfo licensesDir = rootDirectory.CreateSubdirectory("licenses");
            S3FileInfo licenseFile = licensesDir.GetFile("license.txt");
            using (StreamWriter writer = new StreamWriter(licenseFile.OpenWrite()))
                writer.WriteLine("A license to code");

            // Dump the directory tree.
            Console.WriteLine("Write Directory Structure");
            Console.WriteLine("------------------------------------");
            WriteDirectoryStructure(rootDirectory, 0);
            Console.WriteLine("\n\n");

            // Dump the contents of every file under the wiki directory.
            foreach (var file in codeDir.GetFiles())
            {
                Console.WriteLine("Content of {0}", file.Name);
                Console.WriteLine("------------------------------------");
                using (StreamReader reader = file.OpenText())
                {
                    Console.WriteLine(reader.ReadToEnd());
                }
            }

            // Deletes all the files and then the bucket.
            if (deleteAtEnd)
            {
                rootDirectory.Delete(true);
            }
        }
    }
    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
// S3 file-system sample: provisions a bucket with a README, a code file and a
// license file, prints the resulting structure and contents, and finally
// deletes the files and the bucket.
static void Main(string[] args)
{
    if (!checkRequiredFields())
    {
        Console.WriteLine("Press any key to continue...");
        Console.ReadKey();
        return;
    }

    using (client = Amazon.AWSClientFactory.CreateAmazonS3Client(RegionEndpoint.USWest2))
    {
        // Creates the bucket.
        S3DirectoryInfo bucketRoot = new S3DirectoryInfo(client, bucketName);
        bucketRoot.Create();

        // Creates a file at the root of the bucket.
        S3FileInfo readmeFile = bucketRoot.GetFile("README.txt");
        using (StreamWriter readmeWriter = new StreamWriter(readmeFile.OpenWrite()))
        {
            readmeWriter.WriteLine("This is my readme file.");
        }

        DirectoryInfo localRoot = new DirectoryInfo(@"C:\");
        DirectoryInfo localCode = localRoot.CreateSubdirectory("code");

        // Create a directory called code and write a file to it.
        S3DirectoryInfo codeDirectory = bucketRoot.CreateSubdirectory("code");
        S3FileInfo programFile = codeDirectory.GetFile("Program.cs");
        using (StreamWriter programWriter = new StreamWriter(programFile.OpenWrite()))
        {
            programWriter.WriteLine("namespace S3FileSystem_Sample");
            programWriter.WriteLine("{");
            programWriter.WriteLine(" class Program");
            programWriter.WriteLine(" {");
            programWriter.WriteLine(" static void Main(string[] args)");
            programWriter.WriteLine(" {");
            programWriter.WriteLine(" Console.WriteLine(\"Hello World\");");
            programWriter.WriteLine(" }");
            programWriter.WriteLine(" }");
            programWriter.WriteLine("}");
        }

        // Create a directory called license and write a file to it.
        S3DirectoryInfo licenseDirectory = bucketRoot.CreateSubdirectory("licenses");
        S3FileInfo licenseFile = licenseDirectory.GetFile("license.txt");
        using (StreamWriter licenseWriter = new StreamWriter(licenseFile.OpenWrite()))
        {
            licenseWriter.WriteLine("A license to code");
        }

        Console.WriteLine("Write Directory Structure");
        Console.WriteLine("------------------------------------");
        WriteDirectoryStructure(bucketRoot, 0);
        Console.WriteLine("\n\n");

        foreach (var codeEntry in codeDirectory.GetFiles())
        {
            Console.WriteLine("Content of {0}", codeEntry.Name);
            Console.WriteLine("------------------------------------");
            using (StreamReader contentReader = codeEntry.OpenText())
            {
                Console.WriteLine(contentReader.ReadToEnd());
            }
        }

        // Deletes all the files and then the bucket.
        bucketRoot.Delete(true);
    }

    Console.WriteLine("Press any key to continue...");
    Console.ReadKey();
}
// Main experiment driver: reads the UI settings, loads the data set (optionally
// PCA-reduced), builds one recordConfig per run from the UI text boxes, creates
// the per-run result folders (local or S3), and runs the Analizer over each
// configuration (plain run or K-fold cross-validation).
private void btnScript_Click(object sender, EventArgs e)
{
    set2Config();
    u_config.printConfig(@"C:\Wavelets decomposition\config.txt", null);

    // Create a client with a 3-hour read/write timeout for long transfers.
    AmazonS3Config confisS3 = new AmazonS3Config { ProxyHost = null };
    TimeSpan timeOUT = new TimeSpan(3, 0, 0);
    confisS3.ReadWriteTimeout = timeOUT;
    confisS3.Timeout = timeOUT;
    AmazonS3Client client = new AmazonS3Client(confisS3);

    // Mirror the UI check boxes into the run flags.
    UseS3 = UseS3CB.Checked;
    rumPrallel = rumPrallelCB.Checked;
    runBoosting = runBoostingCB.Checked;
    runProoning = runProoningCB.Checked;
    runBoostingProoning = runBoostingProoningCB.Checked;
    runRFProoning = runRFProoningCB.Checked;
    runRf = runRfCB.Checked;
    runBoostingLearningRate = runBoostingLearningRateCB.Checked;
    bucketName = bucketTB.Text;

    string results_path = @ResultsTB.Text;
    string db_path = @DBTB.Text + "\\";

    //get dir — make sure the results root exists (locally or on S3)
    MainFolderName = results_path;
    if (!UseS3)
    {
        if (!Directory.Exists(MainFolderName))
        {
            Directory.CreateDirectory(MainFolderName);
        }
    }
    if (UseS3)
    {
        S3DirectoryInfo s3results_path = new S3DirectoryInfo(client, bucketName, results_path);
        if (!s3results_path.Exists)
        {
            s3results_path.Create();
        }
    }

    //READ DATA
    DB db = new DB();
    db.training_dt = db.getDataTable(db_path + "trainingData.txt");
    db.testing_dt = db.getDataTable(db_path + "testingData.txt");
    db.validation_dt = db.getDataTable(db_path + "ValidData.txt");
    db.training_label = db.getDataTable(db_path + "trainingLabel.txt");
    db.testing_label = db.getDataTable(db_path + "testingLabel.txt");
    db.validation_label = db.getDataTable(db_path + "ValidLabel.txt");
    upper_label = db.training_label.Max();
    lower_label = db.training_label.Min();

    // Keep only the first trainingPercent fraction of each table.
    // NOTE(review): validation_dt is rebuilt from training_dt here while
    // validation_label keeps its own rows — looks inconsistent; confirm intent.
    double trainingPercent = double.Parse(trainingPercentTB.Text);
    long rowToRemoveFrom = Convert.ToInt64(db.training_dt.Count() * trainingPercent);
    db.training_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.training_label = db.training_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_dt = db.testing_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.testing_label = db.testing_label.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_dt = db.training_dt.Where((el, i) => i < rowToRemoveFrom).ToArray();
    db.validation_label = db.validation_label.Where((el, i) => i < rowToRemoveFrom).ToArray();

    ////REDUCE DIM, GLOBAL PCA
    if (usePCA.Checked)
    {
        DimReduction dimreduction = new DimReduction(db.training_dt);
        db.PCAtraining_dt = dimreduction.getGlobalPca(db.training_dt);
        db.PCAtesting_dt = dimreduction.getGlobalPca(db.testing_dt);
        db.PCAvalidation_dt = dimreduction.getGlobalPca(db.validation_dt);
    }
    else
    {
        //de-activate pca for dbg — pass the raw tables through unchanged
        db.PCAtraining_dt = db.training_dt;
        db.PCAtesting_dt = db.testing_dt;
        db.PCAvalidation_dt = db.validation_dt;
    }

    // Per-point grid-index table, filled in by getMainGrid below.
    db.PCAtraining_GridIndex_dt = new long[db.PCAtraining_dt.Count()][];
    for (int i = 0; i < db.PCAtraining_dt.Count(); i++)
    {
        db.PCAtraining_GridIndex_dt[i] = new long[db.PCAtraining_dt[i].Count()];
    }

    //BOUNDING BOX AND MAIN GRID
    boundingBox = db.getboundingBox(db.PCAtraining_dt);
    MainGrid = db.getMainGrid(db.PCAtraining_dt, boundingBox, ref db.PCAtraining_GridIndex_dt);

    //READ CONFIG
    methodConfig mc = new methodConfig(true);
    int Nloops = int.Parse(NloopsTB.Text) - 1;
    int Kfolds = 0;
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        Nloops = Kfolds - 1; // cross-validation overrides the loop count
    }
    for (int k = 0; k < Nloops; k++)
    {
        mc.boostlamda_0.Add(3.8); // - create variant in number of pixels
    }
    //mc.boostlamda_0.Add(1500);// - create variant in number of pixels
    //mc.boostlamda_0.Add(2500);// - create variant in number of pixels
    //mc.boostlamda_0.Add(3000);// - create variant in number of pixels
    mc.generateRecordConfigArr();

    // Populate each run configuration from the UI; evaluateString allows
    // per-run expressions indexed by k.
    for (int k = 0; k < mc.recArr.Count(); k++)
    {
        mc.recArr[k].dim = NfeaturesTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesTB.Text, k));
        mc.recArr[k].approxThresh = double.Parse(evaluateString(approxThreshTB.Text, k));
        mc.recArr[k].partitionErrType = int.Parse(evaluateString(partitionTypeTB.Text, k));
        mc.recArr[k].minWaveSize = int.Parse(evaluateString(minNodeSizeTB.Text, k));
        mc.recArr[k].hopping_size = int.Parse(evaluateString(waveletsSkipEstimationTB.Text, k));
        mc.recArr[k].test_error_size = double.Parse(evaluateString(waveletsPercentEstimationTB.Text, k)); //percent of waves to check
        mc.recArr[k].NskipsinKfunc = double.Parse(evaluateString(boostingKfuncPercentTB.Text, k));
        mc.recArr[k].rfBaggingPercent = double.Parse(evaluateString(bagginPercentTB.Text, k));
        mc.recArr[k].rfNum = int.Parse(evaluateString(NrfTB.Text, k));
        mc.recArr[k].boostNum = int.Parse(evaluateString(NboostTB.Text, k));
        mc.recArr[k].boostProoning_0 = int.Parse(evaluateString(NfirstPruninginBoostingTB.Text, k));
        mc.recArr[k].boostlamda_0 = double.Parse(evaluateString(boostingLamda0TB.Text, k));
        mc.recArr[k].NwaveletsBoosting = int.Parse(evaluateString(NfirstwaveletsBoostingTB.Text, k));
        mc.recArr[k].boostNumLearningRate = int.Parse(evaluateString(NboostingLearningRateTB.Text, k));
        mc.recArr[k].percent_training_db = trainingPercent;
        mc.recArr[k].BoundLevel = int.Parse(evaluateString(boundLevelTB.Text, k));
        mc.recArr[k].NDimsinRF = NfeaturesrfTB.Text == @"all" ? db.PCAtraining_dt[0].Count() : int.Parse(evaluateString(NfeaturesrfTB.Text, k));
        mc.recArr[k].split_type = int.Parse(evaluateString(splitTypeTB.Text, k));
        mc.recArr[k].NormLPType = int.Parse(evaluateString(errTypeEstimationTB.Text, k));
        mc.recArr[k].RFpruningTestRange[1] = int.Parse(evaluateString(RFpruningEstimationRange1TB.Text, k));
        mc.recArr[k].boundDepthTree = int.Parse(evaluateString(boundDepthTB.Text, k));
        mc.recArr[k].CrossValidFold = k;
        // 2m0rr0w2 save labels dim in confif
        mc.recArr[k].labelDim = db.training_label[0].Count();
    }

    //create dirs — one result folder with record_properties.txt per run
    foreach (recordConfig t in mc.recArr)
    {
        if (!UseS3 && !Directory.Exists(MainFolderName + "\\" + t.getShortName()))
        {
            Directory.CreateDirectory(MainFolderName + "\\" + t.getShortName());
            StreamWriter sw = new StreamWriter(MainFolderName + "\\" + t.getShortName() + "\\record_properties.txt", false);
            sw.WriteLine(t.getFullName());
            sw.Close();
            u_config.printConfig(MainFolderName + "\\config.txt", null);
        }
        if (!UseS3)
        {
            continue;
        }
        S3DirectoryInfo s3results_path_with_folders = new S3DirectoryInfo(client, bucketName, results_path + "\\" + t.getShortName());
        if (!s3results_path_with_folders.Exists)
        {
            s3results_path_with_folders.Create();
            S3FileInfo outFile = s3results_path_with_folders.GetFile("record_properties.txt");
            StreamWriter sw = new StreamWriter(outFile.OpenWrite());
            sw.WriteLine(t.getFullName());
            sw.Close();
            S3FileInfo configFile = s3results_path_with_folders.GetFile("config.txt");
            u_config.printConfig("", configFile);
        }
    }

    //SET ID ARRAY LIST
    List <int> trainingID = Enumerable.Range(0, db.PCAtraining_dt.Count()).ToList();
    List <int> testingID = Enumerable.Range(0, db.PCAtesting_dt.Count()).ToList();

    //cross validation
    List <List <int> > trainingFoldId = new List <List <int> >();
    List <List <int> > testingFoldId = new List <List <int> >();
    Random ran = new Random(2); // fixed seed => reproducible shuffle
    List <int> training_rand = trainingID.OrderBy(x => ran.Next()).ToList().GetRange(0, trainingID.Count); //THE LARGEST GROUP IS TRAINING
    if (int.TryParse(croosValidTB.Text, out Kfolds))
    {
        createCrossValid(Kfolds, training_rand, trainingFoldId, testingFoldId);
    }

    //bounding intervals
    int[][] BB = new int[2][];
    BB[0] = new int[boundingBox[0].Count()];
    BB[1] = new int[boundingBox[0].Count()];
    for (int i = 0; i < boundingBox[0].Count(); i++)
    {
        BB[1][i] = MainGrid[i].Count() - 1; //set last index in each dim
    }

    // Run the analyzer once per configuration.
    for (int i = 0; i < mc.recArr.Count; i++)
    {
        Analizer Analizer = new Analizer(MainFolderName + "\\" + mc.recArr[i].getShortName(), MainGrid, db, mc.recArr[i]);
        if (!croosValidCB.Checked)
        {
            Analizer.analize(trainingID, testingID, BB);
        }
        else
        {
            Analizer.analize(trainingFoldId[i], testingFoldId[i], BB); //cross validation
        }
    }
    btnScript.BackColor = Color.Green; // signal completion in the UI
}
/// <summary>
/// Parses a wavelet-forest file previously produced by printConstWavelets2File.
/// The header (until "StartReading") carries metadata; each following line is:
///   ID; child0; child1; {bbIdx0; bbIdx1; gridVal0; gridVal1} per dimension;
///   level; one mean per label dimension; norm; parentID
/// </summary>
public static List <GeoWave> getConstWaveletsFromFile(string filename, recordConfig rc)
{
    if (!Form1.UseS3 && !File.Exists(filename))//this func was not debugged after modification
    {
        MessageBox.Show(@"the file " + Path.GetFileName(filename) + @" doesnt exist in " + Path.GetFullPath(filename));
        return null;
    }

    StreamReader sr;
    if (Form1.UseS3)
    {
        string dir_name = Path.GetDirectoryName(filename);
        string file_name = Path.GetFileName(filename);
        S3DirectoryInfo s3dir = new S3DirectoryInfo(Form1.S3client, Form1.bucketName, dir_name);
        S3FileInfo artFile = s3dir.GetFile(file_name);
        sr = artFile.OpenText();
    }
    else
    {
        sr = new StreamReader(File.OpenRead(filename));
    }

    // Fix: using closes the reader on every path (was leaked on exceptions).
    using (sr)
    {
        string[] values = { "" };
        string line;
        string DimensionReductionMatrix = "";
        int numOfWavlets = -1;
        int dimension = -1;
        int labelDimension = -1;
        double approxOrder = -1;

        //read header metadata until the "StartReading" marker
        while (!sr.EndOfStream && values[0] != "StartReading")
        {
            line = sr.ReadLine();
            values = line.Split(Form1.seperator, StringSplitOptions.RemoveEmptyEntries);
            if (values.Length == 0)
            {
                // Fix: a blank header line used to crash on values[0].
                values = new[] { "" };
                continue;
            }
            if (values[0] == "DimensionReductionMatrix")
            {
                DimensionReductionMatrix = values[1];
            }
            else if (values[0] == "numOfWavlets")
            {
                numOfWavlets = int.Parse(values[1]);
            }
            else if (values[0] == "approxOrder")
            {
                // Fix: approxOrder is a double; int.Parse rejected fractional values.
                approxOrder = double.Parse(values[1]);
            }
            else if (values[0] == "dimension")
            {
                dimension = int.Parse(values[1]);
            }
            else if (values[0] == "labelDimension")
            {
                labelDimension = int.Parse(values[1]);
            }
            else if (values[0] == "StartReading")
            {
                ;
            }
            else
            {
                MessageBox.Show(@"the file " + Path.GetFileName(filename) + @" already exist in " + Path.GetFullPath(filename) + @" might have bad input !");
            }
        }

        //read values
        List <GeoWave> gwArr = new List <GeoWave>();
        while (!sr.EndOfStream)
        {
            GeoWave gw = new GeoWave(dimension, labelDimension, rc);
            line = sr.ReadLine();
            if (line != null)
            {
                values = line.Split(Form1.seperator, StringSplitOptions.RemoveEmptyEntries);
            }
            gw.ID = int.Parse(values[0]);
            gw.child0 = int.Parse(values[1]);
            gw.child1 = int.Parse(values[2]);
            for (int j = 0; j < dimension; j++)
            {
                //4 fields per dimension; the grid VALUES (slots 5+4j, 6+4j) are
                //redundant with the indices and are skipped.
                gw.boubdingBox[0][j] = int.Parse(values[3 + 4 * j]);
                gw.boubdingBox[1][j] = int.Parse(values[4 + 4 * j]);
            }
            // Fix: the offset was a hard-coded constant (4 + 2*4) independent of
            // dimension; the writer puts level right after the per-dimension
            // quadruples, i.e. at index 3 + 4*dimension.
            int counter = 3 + 4 * dimension;
            gw.level = int.Parse(values[counter]);
            counter++;
            for (int j = 0; j < labelDimension; j++)
            {
                // Fix: the old code indexed values[counter + j] while ALSO
                // incrementing counter, reading every other field; the means
                // are consecutive in the file.
                // NOTE(review): the writer emits means with InvariantCulture;
                // parsing here still uses the current culture — confirm.
                gw.MeanValue[j] = double.Parse(values[counter]);
                counter++;
            }
            gw.norm = double.Parse(values[counter]);
            gw.parentID = int.Parse(values[counter + 1]);
            gwArr.Add(gw);
        }
        return gwArr;
    }
}