/// <summary>
/// Parses a delimited batch geocode file into a <see cref="GeocodeFeed"/>.
/// </summary>
/// <param name="textReader">Reader positioned at the start of the file.</param>
/// <param name="format">File format; determines the delimiter (PIPE = '|', TAB = '\t', CSV/default = ',').</param>
/// <returns>A feed containing one entity per successfully parsed row.</returns>
private static GeocodeFeed ParseDelimitedFile(StreamReader textReader, BatchFileFormat format)
{
    char delimiter;
    switch (format)
    {
        case BatchFileFormat.PIPE:
            delimiter = '|';
            break;
        case BatchFileFormat.TAB:
            delimiter = '\t';
            break;
        case BatchFileFormat.CSV:
        default:
            delimiter = ',';
            break;
    }

    var feed = new GeocodeFeed();
    double schemaVersion = 1.0;

    using (var reader = new DelimitedFileReader(textReader, delimiter))
    {
        var row = reader.GetNextRow();
        if (row != null && row.Count > 0)
        {
            // Parse schema version info, e.g. "Bing Spatial Data Services, 2.0".
            if (row[0].StartsWith("Bing Spatial Data Services", StringComparison.OrdinalIgnoreCase) && row.Count >= 2)
            {
                // BUG FIX: use invariant culture so "2.0" parses correctly on
                // comma-decimal locales (culture-sensitive TryParse would fail
                // and silently leave schemaVersion at 0 rather than 2.0).
                double.TryParse(row[1], System.Globalization.NumberStyles.Float,
                    System.Globalization.CultureInfo.InvariantCulture, out schemaVersion);
                row = reader.GetNextRow();
            }

            // Skip header row.
            // BUG FIX: if the version line was the last line of the file, the
            // GetNextRow() above returned null and the original code threw a
            // NullReferenceException on row[0]. Guard before dereferencing.
            if (row != null && row.Count > 0 &&
                (string.Compare(row[0], "Id", StringComparison.OrdinalIgnoreCase) == 0 ||
                 string.Compare(row[0], "GeocodeEntity/@Id", StringComparison.OrdinalIgnoreCase) == 0))
            {
                row = reader.GetNextRow();
            }

            // Parse data rows until the reader is exhausted.
            while (row != null && row.Count > 0)
            {
                var parsedRow = ParseRow(row, schemaVersion);
                if (parsedRow != null)
                {
                    feed.Entities.Add(parsedRow);
                }
                row = reader.GetNextRow();
            }
        }
    }
    return feed;
}
public void TwoDataRowsTest()
{
    // Write a temp CSV with a header row followed by two data rows.
    var tempFile = Path.GetTempFileName();
    using (var writer = new StreamWriter(File.OpenWrite(tempFile)))
    {
        writer.WriteLine("col1,col2,col3");
        writer.WriteLine("row1-val1,row1-val2,row1-val3");
        writer.WriteLine("row2-val1,row2-val2,row2-val3");
        writer.Close();
    }

    using (var reader = new DelimitedFileReader(tempFile, ','))
    {
        // Both data rows should come back keyed by the header columns.
        foreach (var prefix in new[] { "row1", "row2" })
        {
            var values = reader.ReadValues();
            Assert.IsNotNull(values);
            Assert.AreEqual(3, values.Count);
            Assert.AreEqual(prefix + "-val1", values["col1"]);
            Assert.AreEqual(prefix + "-val2", values["col2"]);
            Assert.AreEqual(prefix + "-val3", values["col3"]);
        }

        // End of file: no third row.
        Assert.IsNull(reader.ReadValues());
    }

    File.Delete(tempFile);
}
/// <summary>
/// Loads sequence data from a SEQUEST first-hits file ("*fht.txt") into the
/// given sequence cache via a MAGE pipeline. Other file types are rejected.
/// </summary>
/// <param name="path">Path to the input file.</param>
/// <param name="datasetID">Dataset ID assigned to the sink.</param>
/// <param name="databaseSequenceCache">Cache that receives the parsed sequences.</param>
public void LoadSequenceData(string path, int datasetID, IDatabaseSearchSequenceDAO databaseSequenceCache)
{
    IMageSink sink = null;

    // FIX: path.ToLower().EndsWith(...) was culture-sensitive (CA1304/CA1310);
    // an ordinal case-insensitive comparison is both correct and allocation-free.
    if (path.EndsWith("fht.txt", StringComparison.OrdinalIgnoreCase))
    {
        UpdateStatus("First Hit File MAGE Sink created. ");
        var sequest = new SequestFirstHitSink(databaseSequenceCache);
        sequest.DatasetID = datasetID;
        sink = sequest;
    }
    else
    {
        UpdateStatus("File type is not supported for this kind of sequence data. ");
        return;
    }

    var reader = new DelimitedFileReader();
    reader.Delimiter = "\t";
    reader.FilePath = path;

    var pipeline = ProcessingPipeline.Assemble("PlainFactors", reader, sink);
    pipeline.RunRoot(null);
    sink.CommitChanges();
}
public void NoDelimitersPathTest()
{
    // A file whose only line contains no delimiter characters; the reader is
    // expected to construct without throwing.
    var tempFile = Path.GetTempFileName();
    using (var writer = new StreamWriter(File.OpenWrite(tempFile)))
    {
        writer.WriteLine("xxx");
        writer.Close();
    }
    using (var reader = new DelimitedFileReader(tempFile))
    {
    }

    // FIX: the temp file was previously leaked; delete it like the sibling
    // tests (TwoDataRowsTest, OneDelimiterPathTest, ...) do.
    File.Delete(tempFile);
}
/// <summary>
/// Gets records from a file
/// </summary>
/// <typeparam name="BusinessObjectT">Class type</typeparam>
/// <param name="fileName">Name of the file (no path, no extension, we know where stuff should be!)</param>
/// <returns>List of items, or null when no usable file is available</returns>
public static async Task<IReadOnlyList<BusinessObjectT>> GetRecords<BusinessObjectT>(string fileName)
    where BusinessObjectT : class, new()
{
    // Bail out early when the backing file cannot be fetched/refreshed.
    if (!await EnsureLatestFile(fileName))
    {
        return null;
    }

    var filePath = Path.Combine(Startup.Environment.ContentRootPath, "_data", fileName + ".csv");
    var reader = new DelimitedFileReader(filePath, DelimitedFileReader.DELIMITER_COMMA, hasHeaders: true);
    return reader.GetRecordsList<BusinessObjectT>().AsReadOnly();
}
public void OneDelimiterPathTest()
{
    // Header line with exactly one delimiter -> two columns.
    var tempFile = Path.GetTempFileName();
    using (var writer = new StreamWriter(File.OpenWrite(tempFile)))
    {
        writer.WriteLine("col1,col2");
        writer.Close();
    }

    using (var reader = new DelimitedFileReader(tempFile, ','))
    {
        var columns = reader.ColumnPositions;
        Assert.IsNotNull(columns);
        Assert.AreEqual(2, columns.Length);
        Assert.AreEqual("col1", columns[0]);
        Assert.AreEqual("col2", columns[1]);
    }

    File.Delete(tempFile);
}
static void TestWriter()
{
    Console.WriteLine("Testing SujaySarma.Sdk.FormattedIO.Csv.DelimitedFileWriter...");
    Console.WriteLine();

    // Read the full airports CSV, then time how long writing it back takes.
    var csvWriter = new DelimitedFileWriter(@"C:\Users\Sujay Sarma\Desktop\airports_test.txt", DelimitedFileWriter.DELIMITER_COMMA);
    var csvReader = new DelimitedFileReader(@"D:\Src\products\SujaySarma.Api.OurAirports\src\_data\airports.csv", DelimitedFileReader.DELIMITER_COMMA, hasHeaders: true);

    IList<Airport> airports = csvReader.GetRecordsList<Airport>(0, -1);
    Console.WriteLine($"Airports: Read -- [{airports.Count}] row(s)...");

    DateTime started = DateTime.Now;
    csvWriter.WriteRecords(airports, true);
    DateTime finished = DateTime.Now;

    Console.WriteLine($"Airports: Wrote -- [{airports.Count}] row(s) in {(finished - started).ToString(@"mm\:ss\.ffff")}");
    Console.WriteLine();
}
public void DataRowWithExtraColumnsTest()
{
    // Data row has one more value than the header declares.
    var tempFile = Path.GetTempFileName();
    using (var writer = new StreamWriter(File.OpenWrite(tempFile)))
    {
        writer.WriteLine("col1,col2");
        writer.WriteLine("val1,val2,val3");
        writer.Close();
    }

    using (var reader = new DelimitedFileReader(tempFile, ','))
    {
        var row = reader.ReadValues();
        Assert.IsNotNull(row);
        Assert.AreEqual(2, row.Count);
        Assert.AreEqual("val1", row["col1"]);
        Assert.AreEqual("val2", row["col2"]);
        // The extra value has no matching header column, so it must be dropped.
        Assert.IsFalse(row.ContainsKey("col3"));
    }

    File.Delete(tempFile);
}
internal SlotProvider Parse(string[] args)
{
    // Optional first argument overrides the maximum offset between services.
    if (args.Length > 0)
    {
        MAX_OFFSET_BETWEEN_SERVICES = Convert.ToInt32(args[0]);
    }
    log.Info("[START]");

    var reader = new DelimitedFileReader(@"Tests\input.csv", 0, 0, ',')
    {
        SkipEmptyRows = true
    };
    List<CSVData> list = reader.ToList<CSVData>();

    // Drop the final row ("trial thing?" per the original author). Note:
    // Remove(null) is a harmless no-op when the list is empty.
    var lastRow = list.FindLast(_ => true);
    list.Remove(lastRow);

    return new SlotProvider(list);
}
/// <summary>
/// Loads a mass tag database from the delimited meta-sample file at <c>Path</c>,
/// keeping only tags whose observation count meets the configured minimum.
/// </summary>
/// <returns>The populated <see cref="MassTagDatabase"/> (protein map is left empty).</returns>
public MassTagDatabase LoadDatabase()
{
    var database = new MassTagDatabase();

    var sink = new MAGEMetaSampleDatabaseSink();
    var reader = new DelimitedFileReader();
    reader.Delimiter = ",";
    reader.FilePath = Path;

    var pipeline = ProcessingPipeline.Assemble("MetaSample", reader, sink);
    pipeline.RunRoot(null);

    // Idiom: lambda instead of the verbose anonymous-delegate form (and
    // consistent with the LINQ-based sibling implementation of LoadDatabase).
    var tags = sink.MassTags.FindAll(x => x.ObservationCount >= m_options.MinimumObservationCountFilter);

    database.AddMassTagsAndProteins(tags, new Dictionary<int, List<Protein>>());

    // Fill in logic to read new type of mass tag database.
    return database;
}
public void LoadFactorsFromFile(string path, ObservableCollection<DatasetInformation> datasets, FeatureDataAccessProviders providers)
{
    // Nothing to load factors into when no datasets are present.
    if (datasets.Count < 1)
    {
        return;
    }

    var sink = new MultiAlignFactorSink(datasets, providers.DatasetCache, providers.FactorCache, providers.FactorAssignmentCache);

    var reader = new DelimitedFileReader
    {
        Delimiter = "\t",
        FilePath = path
    };

    ProcessingPipeline.Assemble("PlainFactors", reader, sink).RunRoot(null);
    sink.CommitChanges();
}
internal static async Task ImportFileContentAsync(LarsCosmosDbCollectionSettings settings, DocumentClient client, string fileName, Stream stream)
{
    // Guard clause: unknown file names map to no collection; nothing to import.
    if (!TryGetCollectionId(settings, fileName, out string collectionId))
    {
        return;
    }

    // Loop-invariant: the target collection URI never changes per file.
    var collectionUri = UriFactory.CreateDocumentCollectionUri(settings.DatabaseId, collectionId);

    var headings = new List<string>();
    using (var reader = new StreamReader(stream))
    {
        var isHeaderLine = true;
        foreach (var line in DelimitedFileReader.ReadLines(reader, new DelimitedFileSettings(true)))
        {
            // First line supplies the (cleaned) column headings.
            if (isHeaderLine)
            {
                headings = line.Fields.Select(f => CleanHeading(f.Value)).ToList();
                isHeaderLine = false;
                continue;
            }

            // Map each field to its heading (field numbers are 1-based).
            var dict = new Dictionary<string, object>();
            foreach (var field in line.Fields)
            {
                dict[headings[field.Number - 1]] = field.Value;
            }

            await client.CreateDocumentAsync(collectionUri, dict.ToExpandoObject());
        }
    }
}
public MassTagDatabase LoadDatabase()
{
    var database = new MassTagDatabase();
    var sink = new MAGEMetaSampleDatabaseSink();

    var reader = new DelimitedFileReader
    {
        Delimiter = ",",
        FilePath = Path
    };

    ProcessingPipeline.Assemble("MetaSample", reader, sink).RunRoot(null);

    // Keep only tags observed at least the configured minimum number of times.
    var filteredTags = sink.MassTags
        .Where(tag => tag.ObservationCount >= m_options.MinimumObservationCountFilter)
        .ToList();

    database.AddMassTagsAndProteins(filteredTags, new Dictionary<int, List<Protein>>());

    // Fill in logic to read new type of mass tag database.
    return database;
}
/// <summary>
/// Interactive smoke-test for DelimitedFileReader: repeatedly prompts for an
/// airport IATA code and prints the matching record as indented JSON.
/// </summary>
static void TestReader()
{
    Console.WriteLine("Testing SujaySarma.Sdk.FormattedIO.Csv.DelimitedFileReader...");
    Console.WriteLine();

    // Data source: https://ourairports.com/data/countries.csv
    DelimitedFileReader reader = new DelimitedFileReader(@"D:\Src\products\SujaySarma.Api.OurAirports\src\_data\airports.csv", hasHeaders: true);

    string iata = null;
    do
    {
        Console.WriteLine("Enter airport IATA to search for (blank+ENTER to stop): ");
        iata = Console.ReadLine()?.ToUpper();
        if (!string.IsNullOrWhiteSpace(iata))
        {
            // Linear scan; stop at the first matching record.
            foreach (Airport airport in reader.GetRecords<Airport>())
            {
                if (airport.Iata == iata)
                {
                    Console.WriteLine(
                        JsonSerializer.Serialize(airport, new JsonSerializerOptions()
                        {
                            Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping,
                            IgnoreNullValues = false,
                            WriteIndented = true
                        })
                    );
                    break;
                }
            }
            Console.WriteLine();
        }
    } while (!string.IsNullOrWhiteSpace(iata));

    // FIX: the benchmark loops over countries.csv / regions.csv / airports.csv
    // (and a large commented-out dump routine) that used to follow this point
    // sat after an unconditional `return;` and could never execute (CS0162
    // unreachable code). That dead code has been removed.
}
/// <summary>
/// Run the parsimony algorithm against the peptides and proteins in table sourceTableName in the specified SQLite database
/// </summary>
/// <param name="databaseFolderPath">Folder containing the SQLite database file</param>
/// <param name="dataBaseFileName">SQLite database file name</param>
/// <param name="sourceTableName">Table name to process</param>
/// <returns>True if success; false if an error</returns>
/// <exception cref="DirectoryNotFoundException">When the database folder does not exist</exception>
/// <exception cref="FileNotFoundException">When the database file does not exist</exception>
/// <exception cref="Exception">On mapping-load, parsimony, or SQLite import failures</exception>
public bool ProcessSQLite(string databaseFolderPath, string dataBaseFileName, string sourceTableName = DEFAULT_SQLITE_TABLE)
{
    List<RowEntry> pepToProtMapping;
    List<Node> result;

    var diDataFolder = new DirectoryInfo(databaseFolderPath);
    if (!diDataFolder.Exists)
    {
        throw new DirectoryNotFoundException("Database folder not found: " + databaseFolderPath);
    }

    var fiDatabaseFile = new FileInfo(Path.Combine(diDataFolder.FullName, dataBaseFileName));
    if (!fiDatabaseFile.Exists)
    {
        throw new FileNotFoundException("Database not found: " + fiDatabaseFile);
    }

    if (ShowProgressAtConsole)
    {
        OnStatusEvent("Opening SQLite database " + fiDatabaseFile.FullName);
    }

    if (!VerifySourceTableExists(fiDatabaseFile, sourceTableName))
    {
        return false;
    }

    var dbReader = new SQLiteReader { Database = fiDatabaseFile.FullName };

    try
    {
        var success = GetPeptideProteinMap(dbReader, sourceTableName, out pepToProtMapping);
        if (!success)
        {
            if (ShowProgressAtConsole)
            {
                OnErrorEvent("Error loading data from table {0}; GetPeptideProteinMap returned false", sourceTableName);
            }
            return false;
        }

        if (ShowProgressAtConsole)
        {
            OnStatusEvent("Loaded {0} rows from table {1}", pepToProtMapping.Count, sourceTableName);
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Error calling GetPeptideProteinMap: " + ex.Message, ex);
    }

    if (fiDatabaseFile.DirectoryName == null)
    {
        throw new Exception("Error determining the parent directory for " + fiDatabaseFile.FullName);
    }

    // Temp files holding the parsimony results before they are imported back into SQLite
    var parsimonyResultsFilePath = Path.Combine(fiDatabaseFile.DirectoryName, "pars_info_temp.txt");
    var proteinGroupMembersFilePath = Path.Combine(fiDatabaseFile.DirectoryName, "pars_info_temp_groups.txt");

    if (pepToProtMapping == null || pepToProtMapping.Count == 0)
    {
        DeleteFile(parsimonyResultsFilePath);
        DeleteFile(proteinGroupMembersFilePath);
        throw new Exception("Error in RunAlgorithm: No rows to operate on");
    }

    GlobalIDContainer globalIDTracker;
    try
    {
        PerformParsimonyThreaded(pepToProtMapping, out result, out globalIDTracker);
    }
    catch (Exception ex)
    {
        throw new Exception("Error calling PerformParsimonyThreaded: " + ex.Message, ex);
    }

    if (ShowProgressAtConsole)
    {
        Console.WriteLine();
        OnStatusEvent("Exporting protein groups to temp text files");
    }

    Utilities.SaveResults(result, parsimonyResultsFilePath, proteinGroupMembersFilePath, globalIDTracker);

    ClearExistingSQLiteResults(fiDatabaseFile);

    if (ShowProgressAtConsole)
    {
        Console.WriteLine();
    }

    // The two imports were previously duplicated inline try/catch blocks;
    // use a shared helper instead.
    ImportDelimitedFileToSQLite(fiDatabaseFile, parsimonyResultsFilePath, PARSIMONY_GROUPING_TABLE);
    ImportDelimitedFileToSQLite(fiDatabaseFile, proteinGroupMembersFilePath, PARSIMONY_GROUP_MEMBERS_TABLE);

    DeleteFile(parsimonyResultsFilePath);
    DeleteFile(proteinGroupMembersFilePath);

    return true;
}

/// <summary>
/// Import a delimited text file into the given table of the SQLite database
/// </summary>
/// <param name="fiDatabaseFile">Target SQLite database file</param>
/// <param name="sourceFilePath">Delimited text file to import</param>
/// <param name="targetTableName">Destination table name</param>
/// <exception cref="Exception">Wraps any failure while importing</exception>
private void ImportDelimitedFileToSQLite(FileInfo fiDatabaseFile, string sourceFilePath, string targetTableName)
{
    try
    {
        var reader = new DelimitedFileReader { FilePath = sourceFilePath };

        var writer = new SQLiteWriter
        {
            DbPath = fiDatabaseFile.FullName,
            TableName = targetTableName
        };

        writer.ColDefOverride = new List<MageColumnDef>
        {
            new("GroupID", "integer", "4") // Note that "size" doesn't matter since we're writing to a SqLite database
        };

        if (ShowProgressAtConsole)
        {
            OnStatusEvent("Importing data into table " + targetTableName);
        }

        ProcessingPipeline.Assemble("ImportToSQLite", reader, writer).RunRoot(null);
    }
    catch (Exception ex)
    {
        throw new Exception("Error adding data to table " + targetTableName + " to the SqLite database: " + ex.Message, ex);
    }
}
/// <summary>
/// Splits an X12 835 remittance file into per-billing-provider transactions.
/// Reads segments delimited by "~\n", groups them by BPR (payment) segment,
/// re-wraps each group in the original ISA/GS ... GE/IEA envelope, then hands
/// the groups to WriteFile. Non-835 transaction sets are skipped.
/// </summary>
/// <param name="filename">Path of the X12 file to process.</param>
/// <returns>A human-readable status message (or the exception text on failure).</returns>
public static string parse_to_billing_provider(string filename)
{
    string returnmessage = "";
    string isa_segment = "";
    string iea_segment = "";
    string gs_segment = "";
    string ge_segment = "";
    string se_segment = "";
    string st_segment = "";
    string transaction_set = "";
    string line = "";
    string segment_type = "";
    bool is835 = true;
    bool fatal = false;
    bool first = true;
    string[] output = new string[1000];
    string[] provider = new string[1000];
    int i = 0;

    DelimitedFileReader remittance = new DelimitedFileReader(filename, "~\n");
    try
    {
        line = remittance.Read();
        // X12 interchange must start with an ISA segment ('I').
        // NOTE(review): line[0] throws on an empty first read; the generic
        // catch below absorbs that and reports it in the return message.
        if (line[0] == 'I')
        {
            do
            {
                string[] segment = line.Replace("~", "*").Split("*".ToCharArray());
                segment_type = segment.GetValue(0).ToString();
                switch (segment_type)
                {
                    case "BPR":
                        // Each BPR starts a new output bucket (except the first).
                        if (is835 == true)
                        {
                            if (first == false)
                            {
                                i += 1;
                            }
                            first = false;
                            output[i] += line.ToString();
                        }
                        break;
                    case "GE":
                        ge_segment = line.ToString();
                        break;
                    case "GS":
                        gs_segment = line.ToString();
                        break;
                    case "IEA":
                        // Interchange trailer: wrap every bucket in the envelope.
                        iea_segment = line.ToString();
                        for (int ii = 0; ii <= i; ii++)
                        {
                            if (output.GetValue(ii).ToString().Contains("ISA*00") == false)
                            {
                                output[ii] = isa_segment + gs_segment + output.GetValue(ii).ToString() + ge_segment + iea_segment;
                            }
                        }
                        break;
                    case "ISA":
                        isa_segment = line.ToString();
                        break;
                    case "N1":
                        if (is835 == true)
                        {
                            // N1*PE carries the payee (billing provider) name.
                            switch (segment.GetValue(1).ToString())
                            {
                                case "PE":
                                    provider[i] = segment.GetValue(2).ToString();
                                    break;
                                default:
                                    break;
                            }
                            output[i] += line.ToString();
                        }
                        break;
                    case "SE":
                        // Transaction-set trailer: wrap buckets in ST...SE.
                        if (is835 == true)
                        {
                            se_segment = line.ToString();
                            for (int ii = 0; ii <= i; ii++)
                            {
                                if (output.GetValue(ii).ToString().Contains("ST*835") == false)
                                {
                                    output[ii] = st_segment + output.GetValue(ii).ToString() + se_segment;
                                }
                            }
                        }
                        is835 = true;
                        break;
                    case "ST":
                        // Only 835 transaction sets are processed.
                        if (segment.GetValue(1).ToString() != "835")
                        {
                            is835 = false;
                        }
                        else
                        {
                            transaction_set = segment.GetValue(3).ToString();
                            st_segment = line.ToString();
                        }
                        break;
                    default:
                        if (is835 == true)
                        {
                            if (line.ToString().Length > 0)
                            {
                                output[i] += line.ToString();
                            }
                        }
                        break;
                }
            } while ((line = remittance.Read()) != "" && fatal == false);

            int iii = WriteFile(output, provider, filename);
            if (iii > 0)
            {
                returnmessage += "File Processed, " + iii.ToString() + " files created.";
            }
            else
            {
                returnmessage += "There has been a problem.";
            }
        }
        else
        {
            returnmessage += "This is not an X12 5010 file.";
        }
    }
    catch (Exception ex)
    {
        returnmessage = ex.ToString();
        // FIX: the catch block previously also called remittance.Close(),
        // producing a double Close() because the finally block runs afterwards.
    }
    finally
    {
        remittance.Close();
    }
    return returnmessage;
}
// Builds an IDataReader over the bundled letters CSV fixture.
private IDataReader GetReader()
{
    return new DelimitedFileReader(@"_Data\letters.csv").GetReader();
}
/// <summary>
/// Splits an X12 835 remittance file into per-billing-provider transactions.
/// Reads segments delimited by "~\n", groups them by BPR (payment) segment,
/// re-wraps each group in the original ISA/GS ... GE/IEA envelope, then hands
/// the groups to WriteFile. Non-835 transaction sets are skipped.
/// </summary>
/// <param name="filename">Path of the X12 file to process.</param>
/// <returns>A human-readable status message (or the exception text on failure).</returns>
public static string parse_to_billing_provider(string filename)
{
    string returnmessage = "";
    string isa_segment = "";
    string iea_segment = "";
    string gs_segment = "";
    string ge_segment = "";
    string se_segment = "";
    string st_segment = "";
    string transaction_set = "";
    string line = "";
    string segment_type = "";
    bool is835 = true;
    bool fatal = false;
    bool first = true;
    string[] output = new string[1000];
    string[] provider = new string[1000];
    int i = 0;

    DelimitedFileReader remittance = new DelimitedFileReader(filename, "~\n");
    try
    {
        line = remittance.Read();
        // X12 interchange must start with an ISA segment ('I').
        // NOTE(review): line[0] throws on an empty first read; the generic
        // catch below absorbs that and reports it in the return message.
        if (line[0] == 'I')
        {
            do
            {
                string[] segment = line.Replace("~", "*").Split("*".ToCharArray());
                segment_type = segment.GetValue(0).ToString();
                switch (segment_type)
                {
                    case "BPR":
                        // Each BPR starts a new output bucket (except the first).
                        if (is835 == true)
                        {
                            if (first == false)
                            {
                                i += 1;
                            }
                            first = false;
                            output[i] += line.ToString();
                        }
                        break;
                    case "GE":
                        ge_segment = line.ToString();
                        break;
                    case "GS":
                        gs_segment = line.ToString();
                        break;
                    case "IEA":
                        // Interchange trailer: wrap every bucket in the envelope.
                        iea_segment = line.ToString();
                        for (int ii = 0; ii <= i; ii++)
                        {
                            if (output.GetValue(ii).ToString().Contains("ISA*00") == false)
                            {
                                output[ii] = isa_segment + gs_segment + output.GetValue(ii).ToString() + ge_segment + iea_segment;
                            }
                        }
                        break;
                    case "ISA":
                        isa_segment = line.ToString();
                        break;
                    case "N1":
                        if (is835 == true)
                        {
                            // N1*PE carries the payee (billing provider) name.
                            switch (segment.GetValue(1).ToString())
                            {
                                case "PE":
                                    provider[i] = segment.GetValue(2).ToString();
                                    break;
                                default:
                                    break;
                            }
                            output[i] += line.ToString();
                        }
                        break;
                    case "SE":
                        // Transaction-set trailer: wrap buckets in ST...SE.
                        if (is835 == true)
                        {
                            se_segment = line.ToString();
                            for (int ii = 0; ii <= i; ii++)
                            {
                                if (output.GetValue(ii).ToString().Contains("ST*835") == false)
                                {
                                    output[ii] = st_segment + output.GetValue(ii).ToString() + se_segment;
                                }
                            }
                        }
                        is835 = true;
                        break;
                    case "ST":
                        // Only 835 transaction sets are processed.
                        if (segment.GetValue(1).ToString() != "835")
                        {
                            is835 = false;
                        }
                        else
                        {
                            transaction_set = segment.GetValue(3).ToString();
                            st_segment = line.ToString();
                        }
                        break;
                    default:
                        if (is835 == true)
                        {
                            if (line.ToString().Length > 0)
                            {
                                output[i] += line.ToString();
                            }
                        }
                        break;
                }
            } while ((line = remittance.Read()) != "" && fatal == false);

            int iii = WriteFile(output, provider, filename);
            if (iii > 0)
            {
                returnmessage += "File Processed, " + iii.ToString() + " files created.";
            }
            else
            {
                returnmessage += "There has been a problem.";
            }
        }
        else
        {
            returnmessage += "This is not an X12 5010 file.";
        }
    }
    catch (Exception ex)
    {
        returnmessage = ex.ToString();
        // FIX: the catch block previously also called remittance.Close(),
        // producing a double Close() because the finally block runs afterwards.
    }
    finally
    {
        remittance.Close();
    }
    return returnmessage;
}
/// <summary>
/// Entry point: reads service slots from input.csv, runs the scheduling
/// algorithm with the configured maximum inter-service offset, and logs the
/// resulting blocks ordered by total spare time.
/// </summary>
/// <param name="args">Optional: args[0] overrides MAX_OFFSET_BETWEEN_SERVICES.</param>
static void Main(string[] args)
{
    if (args.Length > 0)
    {
        MAX_OFFSET_BETWEEN_SERVICES = Convert.ToInt32(args[0]);
    }
    log.Info("[START]");

    var reader = new DelimitedFileReader(@"input.csv", 0, 0, ',');
    reader.SkipEmptyRows = true;
    List<CSVData> list = reader.ToList<CSVData>();
    list.Remove(list.FindLast(x => true)); //trial thing?

    var slots = list.Select(CSVDataToSlot).ToArray();
    // NOTE: the large commented-out fixture data, matrix, and alternative-run
    // experiments that used to sit here were dead code and have been removed.

    // Sort slots chronologically by start time before running the algorithm.
    Array.Sort(slots, (s1, s2) => s1.StartTime - s2.StartTime);

    var alg = new Algorithm(MAX_OFFSET_BETWEEN_SERVICES);
    log.Info(String.Format("MAX_OFFSET_BETWEEN_SERVICES = {0}", MAX_OFFSET_BETWEEN_SERVICES));
    var result = alg.Run(slots);

    var stringBuilder = new StringBuilder();
    stringBuilder.AppendLine();
    if (null != result)
    {
        // Report blocks ordered by total spare time.
        result.OrderBy(b => b.SpareTime).ToList().ForEach(block =>
        {
            stringBuilder.AppendLine($"BLOK (przerwy sumarycznie - {block.SpareTime} minut):");
            block.ServiceVariantSlots.ToList().ForEach(slot => stringBuilder.AppendLine(PrintServiceVariantSlot(slot)));
        });
    }
    else
    {
        stringBuilder.AppendLine("BRAK WYNIKU");
    }
    log.Info(stringBuilder.ToString());
}
public void NullPathTest()
{
    // Construct with a null path; presumably the expected-exception handling
    // lives in a test attribute outside this body — confirm against the suite.
    _ = new DelimitedFileReader(null);
}
public void LoadFactorsFromFile(string path, ObservableCollection<DatasetInformation> datasets, FeatureDataAccessProviders providers)
{
    // No datasets means there is nothing to attach factors to.
    if (datasets.Count < 1)
    {
        return;
    }

    var factorSink = new MultiAlignFactorSink(datasets, providers.DatasetCache, providers.FactorCache, providers.FactorAssignmentCache);

    var fileSource = new DelimitedFileReader
    {
        Delimiter = "\t",
        FilePath = path
    };

    var pipeline = ProcessingPipeline.Assemble("PlainFactors", fileSource, factorSink);
    pipeline.RunRoot(null);

    factorSink.CommitChanges();
}
public void EmptyPathTest()
{
    // Construct with an empty path; presumably the expected-exception handling
    // lives in a test attribute outside this body — confirm against the suite.
    _ = new DelimitedFileReader(string.Empty);
}