/// <summary>
/// Gets a data table with all readings
/// </summary>
/// <param name="prefix">Prefix prepended to the table name.</param>
/// <returns>Table with one row per take</returns>
public virtual DataTable GetDataTable(String prefix = "")
{
    DataTable output = null;
    lock (GetDataTableLock)
    {
        output = new DataTable(prefix + name + "_measure");
        output.Add("TakeN", "", "T_i", typeof(Int32));
        output.Add("Time_min", "", "Td_min", typeof(Double));
        output.Add("Between_sec", "", "Tb_sec", typeof(Double));
        output.Add("Reading", "", "R_i", typeof(Double));

        DateTime first = DateTime.MinValue;
        Int32 c = 1;
        foreach (IPerformanceTake t in takeList)
        {
            if (first == DateTime.MinValue)
            {
                first = t.samplingTime;
            }
            output.AddRow(c, t.samplingTime.Subtract(first).TotalMinutes, t.secondsSinceLastTake, t.reading);
            c++;
        }
    }
    return (output);
}
private void FillMealList()
{
    if (dataTable == null)
    {
        string[] headers = { "Name", "Cooking Time", "Protein Content", "Carb Content" };
        dataTable = new DataTable();
        for (int i = 0; i < headers.Length; i++)
        {
            dataTable.Columns.Add(headers[i]);
        }
    }
    else
    {
        dataTable.Clear();
    }

    Meals = db.GetMeals();
    foreach (Meal meal in Meals)
    {
        if (!plan.Contains(meal))
        {
            dataTable.Add(meal);
        }
    }
}
public void Compare_RightAdditionalRow_Result_RightOnly_Has1()
{
    _right.Add(11, "RightTest");

    var res = _comparer.Compare(_left, _right);

    res.RightOnly.Should().HaveCount(1);
    res.RightOnly.Single().Key().Should().Be(11);
}
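// A minimal sketch of the fixture this test (and the matching left-only test
// further below) appears to assume: two keyed tables plus a comparer that
// buckets rows into LeftOnly/RightOnly. The Add(key, value) overload is taken
// from the snippets here; the DataTableComparer name and the seed rows are
// hypothetical.
private DataTable _left;
private DataTable _right;
private DataTableComparer _comparer;

public ComparerTests()
{
    _left = new DataTable();
    _right = new DataTable();
    // Identical baseline rows, so each test isolates a single difference.
    for (int key = 2; key <= 9; key++)
    {
        _left.Add(key, "Value" + key);
        _right.Add(key, "Value" + key);
    }
    _comparer = new DataTableComparer();
}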
public override DataTable buildTableShema(DataTable output)
{
    output.Add(termTableColumns.termName, "Dictionary form of the term", "T_nd", typeof(string), dataPointImportance.normal);
    output.Add(termTableColumns.termLemma, "Normal form", "T_n", typeof(string), dataPointImportance.normal);
    //output.Add(termTableColumns.freqAbs, "Absolute frequency - number of occurrences", "T_af", typeof(Int32), dataPointImportance.normal, "Abs. freq.");
    //output.Add(termTableColumns.freqNorm, "Normalized frequency - abs. frequency divided by the maximum", "T_nf", typeof(Double), dataPointImportance.important, "#0.00000");
    //output.Add(termTableColumns.df, "Document frequency - number of documents containing the term", "T_df", typeof(Int32), dataPointImportance.normal);
    //output.Add(termTableColumns.idf, "Inverse document frequency - logarithmically normalized T_df", "T_idf", typeof(Double), dataPointImportance.normal, "#0.00000");
    //output.Add(termTableColumns.tf_idf, "Term frequency Inverse document frequency - calculated as TF-IDF", "T_tf-idf", typeof(Double), dataPointImportance.important, "#0.00000");
    output.Add(termTableColumns.semanticDistance, "Semantic distance between the spark central term and this term", "T_sr", typeof(double), dataPointImportance.normal, "#0.00000", "Distance (nominal)");
    output.Add(termTableColumns.normalizedSemanticDistance, "Normalized semantic relevance - divided by max(T_sr)", "T_nsr", typeof(double), dataPointImportance.important, "#0.00000", "Normalized semantic distance");
    return (output);
}
/// <summary>
/// Gets a clean data table with the term weights.
/// </summary>
/// <param name="tableName">Name of the table; defaults to the weight table's own name.</param>
/// <param name="ds">Optional dataset to which the resulting table is added.</param>
/// <param name="onlyTermAndFreq">if set to <c>true</c>, only the term, inflections and absolute frequency columns are built.</param>
/// <returns>The populated table</returns>
public DataTable GetDataTableClean(string tableName = "", DataSet ds = null, bool onlyTermAndFreq = false)
{
    if (tableName.isNullOrEmpty())
    {
        tableName = name;
    }

    DataTable output = new DataTable();
    output.SetTitle(tableName);

    output.Add(termTableColumns.termName, "Nominal form of the term", "Tn", typeof(string), dataPointImportance.normal);
    output.Add(termTableColumns.termInflections, "Inflected words or otherwise related terms in the same semantic cloud, as CSV", "Ti", typeof(string), dataPointImportance.normal);
    output.Add(termTableColumns.freqAbs, "Absolute frequency - number of occurrences", "T_af", typeof(int), dataPointImportance.normal, "Abs. freq.");

    if (!onlyTermAndFreq)
    {
        output.Add(termTableColumns.freqNorm, "Normalized frequency - abs. frequency divided by the maximum", "T_nf", typeof(double), dataPointImportance.important, "#0.00000");
        output.Add(termTableColumns.df, "Document frequency - number of documents containing the term", "T_df", typeof(int), dataPointImportance.normal);
        output.Add(termTableColumns.idf, "Inverse document frequency - logarithmically normalized T_df", "T_idf", typeof(double), dataPointImportance.normal, "#0.00000");
        output.Add(termTableColumns.tf_idf, "Term frequency - Inverse document frequency, calculated as TF-IDF", "T_tf-idf", typeof(double), dataPointImportance.important, "#0.00000");
        output.Add(termTableColumns.cw, "Cumulative weight of term", "T_cw", typeof(double), dataPointImportance.normal, "#0.00000");
        output.Add(termTableColumns.ncw, "Normalized cumulative weight of term", "T_ncw", typeof(double), dataPointImportance.important, "#0.00000");
    }

    foreach (IWeightTableTerm t in terms.Values)
    {
        DataRow dr = output.NewRow();
        dr[nameof(termTableColumns.termName)] = t.name;

        List<string> _all = t.GetAllForms(false);
        dr[nameof(termTableColumns.termInflections)] = _all.toCsvInLine();
        dr[nameof(termTableColumns.freqAbs)] = GetAFreq(t.nominalForm);

        if (!onlyTermAndFreq)
        {
            dr[nameof(termTableColumns.freqNorm)] = GetNFreq(t.nominalForm);
            dr[nameof(termTableColumns.df)] = GetBDFreq(t.nominalForm);
            dr[nameof(termTableColumns.idf)] = GetIDF(t.nominalForm);
            dr[nameof(termTableColumns.tf_idf)] = GetTF_IDF(t.nominalForm);
            dr[nameof(termTableColumns.cw)] = GetWeight(t.nominalForm);
            dr[nameof(termTableColumns.ncw)] = GetNWeight(t.nominalForm);
        }

        output.Rows.Add(dr);
    }

    if (ds != null)
    {
        ds.AddTable(output);
    }

    return (output);
}
/// <summary>
/// Handles the Core's data table manipulation on a name change.
/// </summary>
/// <param name="oldName">Full name before the change</param>
/// <param name="newName">New full name</param>
protected virtual void NameChangedHandler(string oldName, string newName)
{
    if (DataTable != null)
    {
        try
        {
            DataTable.Rename(oldName, newName);
        }
        catch (ArgumentException ex)
        {
            if (ex.ParamName == "oldName")
            {
                // No entry under the old name yet: register the new name directly.
                DataTable.Add(newName, DataType, VarClass);
            }
            else
            {
                throw; // rethrow without resetting the stack trace
            }
        }

        if (DefaultValue != null)
        {
            DataTable.SetValue(newName, DefaultValue);
        }
    }
}
public void Do(double a, double b, double Eps = 0.001)
{
    NumberOfIteration = 0;
    DataTable.ClearTable();

    // Dichotomous search: probe two points Delta apart around the interval midpoint.
    double Delta = Eps / 10;
    Data.a = a;
    Data.b = b;
    Data.x1 = (Data.a + Data.b - Delta) / 2;
    Data.x2 = (Data.a + Data.b + Delta) / 2;
    Data.fx1 = Func.Value(Data.x1);
    Data.fx2 = Func.Value(Data.x2);
    DataTable.Add(Data.x1, Data.x2, Data.fx1, Data.fx2, Data.a, Data.b);

    NumberOfIterationsObjectiveFunction = 0;
    while (DataTable.Table[NumberOfIterationsObjectiveFunction++].difference_ab > Eps)
    {
        // Keep the half of [a, b] that contains the smaller probe value.
        if (Data.fx1 < Data.fx2)
        {
            Data.b = Data.x2;
        }
        else
        {
            Data.a = Data.x1;
        }

        Data.x1 = (Data.a + Data.b - Delta) / 2;
        Data.x2 = (Data.a + Data.b + Delta) / 2;
        Data.fx1 = Func.Value(Data.x1);
        Data.fx2 = Func.Value(Data.x2);
        DataTable.Add(Data.x1, Data.x2, Data.fx1, Data.fx2, Data.a, Data.b);
        NumberOfIteration++;
    }

    // Two objective-function evaluations per logged iteration.
    NumberOfIterationsObjectiveFunction *= 2;
}
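// For reference, the interval-halving logic above, isolated from the
// Data/Func/DataTable plumbing. A minimal sketch assuming a unimodal objective;
// the method name and the sample function are hypothetical, not from the source.
static double DichotomousMinimum(Func<double, double> f, double a, double b, double eps = 0.001)
{
    double delta = eps / 10;
    while (b - a > eps)
    {
        double x1 = (a + b - delta) / 2;
        double x2 = (a + b + delta) / 2;
        if (f(x1) < f(x2))
        {
            b = x2; // the minimum lies in [a, x2]
        }
        else
        {
            a = x1; // the minimum lies in [x1, b]
        }
    }
    return (a + b) / 2;
}

// e.g. DichotomousMinimum(x => (x - 2) * (x - 2), 0, 5) returns approximately 2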
private void GenerateRows(int count)
{
    _watch.Restart();

    var values = new[] { "Value1", "Value2", "Value3" };
    // Single-element array so the lambda below can mutate the captured counter.
    int[] id = { 2 };
    var generator = new Faker<Row>()
        .RuleFor(x => x.Key, x => id[0]++)
        .RuleFor(x => x.Value, x => x.PickRandom(values));

    id[0] = 2;
    foreach (var row in generator.Generate(count))
    {
        _left.Add(row.Key, row.Value);
    }

    // Reset the counter so both tables get the same key sequence.
    id[0] = 2;
    foreach (var row in generator.Generate(count))
    {
        _right.Add(row.Key, row.Value);
    }

    _watch.Stop();
    Console.WriteLine("Generating Time: " + _watch.Elapsed);
}
// Creates and updates data tables from the JSON response.
private void makeDatatable(string responseText)
{
    App.Current.Dispatcher.Invoke((System.Action) delegate
    {
        if (responseText != null)
        {
            JArray DataJsonArray = JArray.Parse(responseText);
            var DataTableList = DataJsonArray.ToObject<List<TableModel>>();

            // Populate the collection on first load; otherwise refresh the
            // existing view models in place.
            if (DataTable.Count < DataTableList.Count)
            {
                foreach (var d in DataTableList)
                {
                    DataTable.Add(new TablesViewModel(d));
                }
            }
            else
            {
                for (int i = 0; i < DataTable.Count; i++)
                {
                    DataTable[i].UpdateWithModel(DataTableList[i]);
                }
            }
        }
    });
}
public DataTable RunAndReturnTable(string script, TSQLEngine engine)
{
    object result = Run(script, engine);
    if (result is System.Collections.IEnumerable enumerableResult)
    {
        // Build one column per public field of the enumerable's item type.
        var itemType = result.GetType().GenericTypeArguments.FirstOrDefault();
        List<DataColumn> columns2 = new List<DataColumn>();
        int index = 0;
        foreach (var field in itemType.GetFields())
        {
            columns2.Add(new DataColumn(field.Name, field.FieldType, index));
            index++;
        }

        DataTable t = new DataTable("entities", columns2.ToArray());
        foreach (var row in enumerableResult)
        {
            // Read the field values off each item, in column order.
            object[] values = new object[columns2.Count];
            for (int i = 0; i < columns2.Count; i++)
            {
                values[i] = itemType.GetField(columns2[i].ColumnName).GetValue(row);
            }
            DataRow row2 = new DataRow(values);
            t.Add(row2);
        }
        return (t);
    }
    return (null);
}
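// A minimal illustration of the reflection step above, independent of the
// TSQLEngine types. Assumes `using System; using System.Collections.Generic;
// using System.Linq;`. The Entity type and the printed layout are hypothetical.
struct Entity
{
    public int Id;
    public string Name;
}

static void PrintColumnLayout()
{
    var items = new List<Entity> { new Entity { Id = 1, Name = "first" } };

    // List<Entity> carries its item type as a generic argument.
    var itemType = items.GetType().GenericTypeArguments.First(); // -> Entity
    foreach (var field in itemType.GetFields())
    {
        // Prints: Id (Int32), then Name (String)
        Console.WriteLine($"{field.Name} ({field.FieldType.Name})");
    }
}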
private void ParseDataSection([NotNull] string line)
{
    // Check for a label (text before ':')
    var labelIndex = line.IndexOf(":", StringComparison.Ordinal);
    if (labelIndex > -1)
    {
        var label = line.Substring(0, labelIndex);
        // The label's offset is the number of data bytes emitted so far.
        var offset = DataList.SelectMany(a => a).ToArray().Length;
        DataTable.Add(label, offset);

        if (line.Length > labelIndex + 1)
        {
            line = line.Substring(labelIndex + 1).Trim(' ', '\t');
        }
        else
        {
            return; // label-only line
        }
    }

    var lineSplit = line.Split(new[] { ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries);
    if (lineSplit.Length != 2)
    {
        throw new ArgumentException("Invalid Syntax (parameter count): " + line);
    }

    var data = ParseDataDefinition(lineSplit[0], lineSplit[1]);
    if (data != null)
    {
        DataList.Add(data.Encode());
    }
}
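// A minimal sketch of the label-offset bookkeeping above, assuming DataList
// holds the encoded byte arrays and DataTable maps labels to byte offsets.
// The plain Dictionary stand-in and the sample contents are hypothetical.
var DataList = new List<byte[]> { new byte[4], new byte[2] }; // 6 bytes emitted so far
var DataTable = new Dictionary<string, int>();

// A line beginning "next:" would therefore be recorded at offset 6:
var offset = DataList.SelectMany(a => a).ToArray().Length; // 6
DataTable.Add("next", offset);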
public static DataTable MakeTable(this WebSiteDocumentsSet docSet, Dictionary<String, SpaceDocumentModel> docModels)
{
    DataTable table = new DataTable();
    table.SetTitle(docSet.name);
    table.SetDescription(docSet.description);

    DataColumn column_rank = table.Add("Nr", "Order number", "Nr", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(10);
    DataColumn column_domain = table.Add("Domain", "Web site domain", "Domain", typeof(String), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(40);
    DataColumn column_page = table.Add("Pages", "Number of pages for the website", "Pages", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20);

    String g1 = "Presentation";
    DataColumn column_Terms = table.Add("Terms", "Number of distinct terms", "Terms", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20).SetGroup(g1).SetDefaultBackground("#FF6633");
    DataColumn column_Tokens = table.Add("Tokens", "Total number of tokens", "Tokens", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20).SetGroup(g1).SetDefaultBackground("#FF6633");

    Int32 p = 0;
    Int32 c = 1;
    foreach (var pair in docSet)
    {
        var dr = table.NewRow();
        dr[column_rank] = c;
        dr[column_domain] = pair.domain;
        dr[column_page] = pair.documents.Count;

        var docModel = docModels[pair.domain];
        dr[column_Terms] = docModel.terms.Count;
        dr[column_Tokens] = docModel.terms.GetSumFrequency();

        p += pair.documents.Count;
        c++;
        table.Rows.Add(dr);
    }

    table.AddExtra("Category name [" + docSet.name + "]");
    table.AddExtra("Category description [" + docSet.description + "]");
    table.SetAdditionalInfoEntry("Websites", docSet.Count, "Number of websites in the set");
    table.SetAdditionalInfoEntry("Web pages", p, "Total count of pages");
    // table.SetAdditionalInfoEntry("Total tokens", terms.GetSumFrequency(), "Total number of tokens extracted from the corpus/document, i.e. sum of all frequencies");
    return (table);
}
public void Do(double a, double b, double Eps = 0.001)
{
    NumberOfIteration = 1;
    DataTable.ClearTable();
    Data.a = a;
    Data.b = b;
    Data.difference_ab = Math.Abs(Data.b - Data.a);

    // Find the smallest Fibonacci number F[n] with (b - a) / Eps <= F[n].
    int n = 1;
    while (Data.difference_ab / Eps > F[n])
    {
        n++;
    }
    // CHANGE HERE
    n = n - 2 - 1;
    // CHANGE HERE

    int k = 0;
    double temp = F[n];
    double temp_2 = F[n + 2];
    double temp_3 = temp / temp_2;
    Data.x1 = Data.a + temp_3 * Data.difference_ab;
    Data.x2 = Data.a + Data.b - Data.x1;
    Data.fx1 = Func.Value(Data.x1);
    Data.fx2 = Func.Value(Data.x2);
    NumberOfIterationsObjectiveFunction = 1;

    while (Math.Abs(Data.b - Data.a) > Eps)
    {
        if (Data.fx1 < Data.fx2)
        {
            // Minimum lies in [a, x2]: old x1 becomes the new x2.
            Data.b = Data.x2;
            Data.x2 = Data.x1;
            Data.fx2 = Data.fx1;
            temp = F[n - k + 1];
            temp_2 = F[n - k + 3];
            temp_3 = temp / temp_2;
            Data.x1 = Data.a + temp_3 * Math.Abs(Data.b - Data.a);
            Data.fx1 = Func.Value(Data.x1);
        }
        else
        {
            // Minimum lies in [x1, b]: old x2 becomes the new x1.
            Data.a = Data.x1;
            Data.x1 = Data.x2;
            Data.fx1 = Data.fx2;
            temp = F[n - k + 2];
            temp_2 = F[n - k + 3];
            temp_3 = temp / temp_2;
            Data.x2 = Data.a + temp_3 * Math.Abs(Data.b - Data.a);
            Data.fx2 = Func.Value(Data.x2);
        }
        k++;
        NumberOfIteration++;
        DataTable.Add(Data.x1, Data.x2, Data.fx1, Data.fx2, Data.a, Data.b);
        NumberOfIterationsObjectiveFunction++;
    }
}
public override DataTable buildTableShema(DataTable output)
{
    output.Add(termTableColumns.termName, "Nominal form of the term", "T_n", typeof(string), dataPointImportance.normal);
    output.Add(termTableColumns.freqAbs, "Absolute frequency - number of occurrences", "T_af", typeof(int), dataPointImportance.normal, "", "Abs. freq.");
    output.Add(termTableColumns.freqNorm, "Normalized frequency - abs. frequency divided by the maximum", "T_nf", typeof(double), dataPointImportance.important, "#0.00000");
    output.Add(termTableColumns.df, "Document frequency - number of documents containing the term", "T_df", typeof(int), dataPointImportance.normal);
    output.Add(termTableColumns.idf, "Inverse document frequency - logarithmically normalized T_df", "T_idf", typeof(double), dataPointImportance.normal, "#0.00000");
    output.Add(termTableColumns.tf_idf, "Term frequency - Inverse document frequency, calculated as TF-IDF", "T_tf-idf", typeof(double), dataPointImportance.important, "#0.00000");
    output.Add(termTableColumns.words, "Number of words in the expanded term", "T_c", typeof(int), dataPointImportance.normal, "");
    output.Add(termTableColumns.cw, "Cumulative weight of all TermInstance-s of the term spark that were found in the query", "T_cw", typeof(double), dataPointImportance.normal, "#0.00000");
    output.Add(termTableColumns.ncw, "Normalized cumulative weight of term", "T_ncw", typeof(double), dataPointImportance.important, "#0.00000");
    return (output);
}
public void Do2(double ac, double bc, double Eps = 0.001)
{
    double a = ac, b = bc;
    NumberOfIteration = 1;
    DataTable.ClearTable();
    NumberOfIterationsObjectiveFunction = 0;

    // Pick the Fibonacci index matching the required interval reduction.
    int i = 1;
    int k = 0;
    double res = (b - a) / Eps;
    while (res >= F[i])
    {
        i++;
    }
    i = i - 2 - 1;

    double temp = F[i];
    double temp_2 = F[i + 2];
    double temp_3 = temp / temp_2;
    double x1 = a + temp_3 * (b - a);
    double x2 = a + b - x1;
    double f1 = Func.Value(x1);
    double f2 = Func.Value(x2);

    while (Math.Abs(b - a) > Eps)
    {
        if (f1 < f2)
        {
            b = x2;
            x2 = x1;
            f2 = f1;
            temp = F[i - k + 1];
            temp_2 = F[i - k + 3];
            temp_3 = temp / temp_2;
            x1 = a + temp_3 * (b - a);
            f1 = Func.Value(x1);
        }
        else
        {
            a = x1;
            x1 = x2;
            f1 = f2;
            temp = F[i - k + 2];
            temp_2 = F[i - k + 3];
            temp_3 = temp / temp_2;
            x2 = a + temp_3 * (b - a);
            f2 = Func.Value(x2);
        }
        k++;
        NumberOfIteration++;
        DataTable.Add(x1, x2, f1, f2, a, b);
        NumberOfIterationsObjectiveFunction++;
    }
}
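// The same Fibonacci-search idea as Do/Do2 above, stripped of the
// Data/DataTable bookkeeping. A minimal sketch assuming a unimodal objective;
// the method name and the local Fibonacci list are hypothetical.
static double FibonacciMinimum(Func<double, double> f, double a, double b, double eps = 0.001)
{
    // Grow F = 1, 1, 2, 3, 5, ... until the final interval (2 * (b - a) / F[n]) <= eps.
    var F = new List<double> { 1, 1 };
    while (F[F.Count - 1] < 2 * (b - a) / eps)
    {
        F.Add(F[F.Count - 1] + F[F.Count - 2]);
    }

    int n = F.Count - 1;
    double x1 = a + (F[n - 2] / F[n]) * (b - a);
    double x2 = a + b - x1;
    double f1 = f(x1), f2 = f(x2);

    // Each step reuses one previous probe, so only one new evaluation per iteration.
    for (int m = n - 1; m >= 2 && Math.Abs(b - a) > eps; m--)
    {
        if (f1 < f2)
        {
            b = x2; x2 = x1; f2 = f1;              // keep [a, x2], old x1 is the new x2
            x1 = a + (F[m - 2] / F[m]) * (b - a);
            f1 = f(x1);
        }
        else
        {
            a = x1; x1 = x2; f1 = f2;              // keep [x1, b], old x2 is the new x1
            x2 = a + (F[m - 1] / F[m]) * (b - a);
            f2 = f(x2);
        }
    }
    return (a + b) / 2;
}

// e.g. FibonacciMinimum(x => (x - 2) * (x - 2), 0, 5) returns approximately 2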
protected override void DataTableAlloc()
{
    base.DataTableAlloc();
    if (DataTable != null)
    {
        DataTable.Add(Name, typeof(string), Data.LDVarClass.OutFunction, _Code);
    }
}
protected override void DataTableAlloc()
{
    base.DataTableAlloc();
    if (DataTable != null && !string.IsNullOrEmpty(_Limit))
    {
        DataTable.Add(_Limit, typeof(short));
    }
}
public void Add(DataTable.DataItem target, Tree.Node equation)
{
    d_items.Add(new Item(target, equation));

    // Add row to index table
    d_indextable.Add(new Index((ulong)target.DataIndex, target));
    d_indextable.IntegerTypeSize = (ulong)target.DataIndex;

    foreach (Tree.Embedding.Argument arg in d_function.OrderedArguments)
    {
        Tree.Node subnode = equation.FromPath(arg.Path);
        DataTable.DataItem it = d_program.StateTable[subnode];

        d_indextable.Add(new Index((ulong)it.DataIndex, it)).Type = (it.Type | DataTable.DataItem.Flags.Index);
        d_indextable.IntegerTypeSize = (ulong)it.DataIndex;
    }
}
public void Compare_LeftAdditionalRow_Result_LeftOnly_Has1()
{
    _left.Add(10, "Test");

    var res = _comparer.Compare(_left, _right);

    res.LeftOnly.Should().HaveCount(1);
    res.LeftOnly.Single().Key().Should().Be(10);
}
public override void InjectData(object obj)
{
    IList<T> oDatas = obj as IList<T>;
    for (int i = 0; i < oDatas.Count; i++)
    {
        T oData = oDatas[i];
        DataTable.Add(oData.Guid, oData);
    }
}
protected override void DataTableAlloc()
{
    base.DataTableAlloc();
    if (DataTable != null)
    {
        DataTable.Add(FullName + "_READ", typeof(short), Data.LDVarClass.Simulator);
        DataTable.Add(FullName + "_INPUT", typeof(short), Data.LDVarClass.Simulator);
    }
}
protected override void DataTableAlloc()
{
    base.DataTableAlloc();
    // Register the operand only when it is a variable name, i.e. non-empty
    // and not a literal byte value.
    if (!byte.TryParse(_DudyCycle, out byte temp) && !string.IsNullOrEmpty(_DudyCycle))
    {
        if (DataTable != null)
        {
            DataTable.Add(_DudyCycle, typeof(byte));
        }
    }
}
private DataTable ConvertHashtableToDataTable(Hashtable table, ref DataTable dictionary)
{
    if (table != null)
    {
        foreach (string key in table.Keys)
        {
            dictionary.Add(key, table[key]);
        }
    }
    return (dictionary);
}
/// <summary>
/// Gets the data table with the complete pair collection
/// </summary>
/// <returns>Table of matched term pairs</returns>
public DataTable GetDataTable()
{
    DataTable output = new DataTable("MatchedTerms");
    output.Add("T", "Matched Term", "T", typeof(String), imbSCI.Core.enums.dataPointImportance.normal, "", "Term");
    output.Add("IDFq", "Term IDF at case/query", "IDF_qi", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F2", "IDF qi");
    output.Add("IDFi", "Term IDF at class/document", "IDF_ci", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F2", "IDF ci");
    output.Add("Wqi", "Term weight at case/query", "W_qi", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F5", "Term W_qi");
    output.Add("Wci", "Term weight at class/document", "W_ci", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F5", "Term W_ci");
    output.Add("Pw", "Term pair weight-factor", "P_w", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F5", "Pair W");

    foreach (webLemmaTermPair pair in this)
    {
        var dr = output.NewRow();
        dr["T"] = pair.entryA.nominalForm;
        dr["IDFq"] = pair.entryA.documentFrequency;
        dr["IDFi"] = pair.entryB.documentFrequency; // document-side entry, matching the IDF_ci column description
        dr["Wqi"] = pair.entryA.weight;
        dr["Wci"] = pair.entryB.weight;
        dr["Pw"] = pair.factor;
        output.Rows.Add(dr);
    }

    output.AddExtra("Total pairs: " + this.Count);
    return (output);
}
/// <summary>
/// Makes a ranked table with term frequencies
/// </summary>
/// <param name="terms">The terms.</param>
/// <param name="name">The name.</param>
/// <param name="description">The description.</param>
/// <param name="limit">Maximum number of entries to include.</param>
/// <returns>Ranked frequency table</returns>
public static DataTable MakeTable(this TokenDictionary terms, string name, string description, Int32 limit = 1000)
{
    DataTable table = new DataTable();
    table.SetTitle(name);
    table.SetDescription(description);

    table.SetAdditionalInfoEntry("Distinct terms", terms.Count, "Total distinct terms in the dictionary");
    table.SetAdditionalInfoEntry("Max frequency", terms.GetMaxFrequency(), "Highest frequency");
    table.SetAdditionalInfoEntry("Total tokens", terms.GetSumFrequency(), "Total number of tokens extracted from the corpus/document, i.e. sum of all frequencies");

    DataColumn column_rank = table.Add("Rank", "Rank by frequency", "R", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20);
    DataColumn column_id = table.Add("ID", "Token ID", "id", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(20);
    DataColumn column_token = table.Add("Token", "Token", "t", typeof(String), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(50);
    DataColumn column_freq = table.Add("Frequency", "Absolute number of token occurrences in the corpus/document", "TF", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(30);

    // var tokens = terms.GetTokens();
    var list = terms.GetRankedTokenFrequency(limit);
    Int32 c = 1;
    foreach (var pair in list)
    {
        var dr = table.NewRow();
        dr[column_rank] = c;
        dr[column_id] = terms.GetTokenID(pair.Key);
        dr[column_token] = pair.Key;
        dr[column_freq] = pair.Value;
        c++;
        table.Rows.Add(dr);
    }

    if (terms.Count > limit)
    {
        table.AddExtra("Table contains only top [" + limit + "] entries, out of [" + terms.Count + "] enumerated in the dictionary");
    }
    return (table);
}
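// Hypothetical usage of the MakeTable extension above; the dictionary
// population step and all names are illustrative, not from the source.
TokenDictionary terms = new TokenDictionary();
// ... populate `terms` while tokenizing a corpus/document ...
DataTable top100 = terms.MakeTable(
    "TokenFrequencies",
    "Top tokens ranked by absolute frequency",
    100);
// top100 carries Rank/ID/Token/Frequency columns plus the additional-info entries.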
/// <summary>
/// Gets the data table of all targets
/// </summary>
/// <returns>Table with one row per crawl target</returns>
public DataTable GetDataTable()
{
    DataTable output = new DataTable(wRecord.domain.getFilename());
    output.Add(nameof(spiderTarget.url), "Resolved url");
    output.Add("VectorIn", "Number of incoming links", "Vin_c", typeof(int));
    output.Add("VectorOut", "Number of outgoing links", "Vout_c", typeof(int));
    output.Add(nameof(spiderPage.iterationDiscovery), "Iteration discovery", "I_d", typeof(int));
    output.Add(nameof(spiderPage.iterationLoaded), "Iteration loaded", "I_l", typeof(int));
    output.Add(nameof(spiderPage.contentHash), "Content hash", "H");

    foreach (spiderTarget target in items.Values)
    {
        if (target.page != null)
        {
            output.AddRow(target.url, target.linkVectors.Count(), target.page.relationship.outflowLinks.Count(), target.iterationDiscovery, target.iterationLoaded, target.page.contentHash);
        }
        else
        {
            // Target not yet loaded: no outgoing links or content hash available.
            output.AddRow(target.url, target.linkVectors.Count(), 0, target.iterationDiscovery, target.iterationLoaded, "");
        }
    }
    return (output);
}
protected override void DataTableAlloc()
{
    base.DataTableAlloc();
    if (DataTable != null)
    {
        DataTable.Add(FullName, DataType, VarClass);
        if (DefaultValue != null)
        {
            DataTable.SetValue(FullName, DefaultValue);
        }
    }
}
private static void AddColumn(
    DataTable table,
    AsyncDataViewModelPreset preset,
    ColumnViewModelPreset columnPreset,
    DataColumn column)
{
    column.Id = columnPreset.Id;
    column.Name = columnPreset.Name;
    column.Width = columnPreset.Width;
    column.IsVisible = columnPreset.IsVisible;
    column.IsResizable = true;
    column.TextAlignment = columnPreset.TextAlignment;

    preset.ConfigurableColumns.Add(columnPreset);
    table.Add(column);
}
private TimeTable()
{
    timeTables = new Dictionary<string, IListing>()
    {
        { nameof(rooms), rooms = new DataTable(nameof(rooms)) },
        { nameof(groups), groups = new DataTable(nameof(groups)) },
        { nameof(classes), classes = new DataTable(nameof(classes)) },
        { nameof(teachers), teachers = new DataTable(nameof(teachers)) },
        { nameof(activities), activities = new ActivityListing() }
    };

    // Default values
    rooms.Add("110");
    rooms.Add("111");
    rooms.Add("120");
    rooms.Add("121");

    groups.Add("1a");
    groups.Add("1b");
    groups.Add("1c");
    groups.Add("2a");
    groups.Add("2b");
    groups.Add("3a");
    groups.Add("3b");
    groups.Add("4a");
    groups.Add("4b");

    classes.Add("mat");
    classes.Add("geo");
    classes.Add("eng");
    classes.Add("phys");
    classes.Add("biol");

    teachers.Add("kowalski");
    teachers.Add("nowak");
    teachers.Add("smith");
    teachers.Add("clarkson");
    teachers.Add("may");
    teachers.Add("hammond");
    teachers.Add("atkinson");
}
/// <summary>
/// Makes a table of the links between two node sets.
/// </summary>
/// <typeparam name="TNodeA">The type of node A.</typeparam>
/// <typeparam name="TNodeB">The type of node B.</typeparam>
/// <param name="relationship">The relationship.</param>
/// <param name="name">The name.</param>
/// <param name="description">The description.</param>
/// <returns>Table with one row per link</returns>
public static DataTable MakeTable<TNodeA, TNodeB>(this Relationships<TNodeA, TNodeB> relationship, String name, String description)
    where TNodeA : IObjectWithName
    where TNodeB : IObjectWithName
{
    DataTable table = new DataTable();
    table.SetTitle(name);
    table.SetDescription(description);

    DataColumn column_rank = table.Add("Nr", "Order number", "Nr", typeof(Int32), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(10);
    DataColumn column_nodeA = table.Add("A", "Node [" + typeof(TNodeA).Name + "] A", "A", typeof(String), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(30);
    DataColumn column_nodeB = table.Add("B", "Node [" + typeof(TNodeB).Name + "] B", "B", typeof(String), imbSCI.Core.enums.dataPointImportance.normal).SetWidth(30);
    DataColumn column_weight = table.Add("W", "Weight of the relationship", "W", typeof(Double), imbSCI.Core.enums.dataPointImportance.normal, "F2").SetWidth(20);

    Int32 c = 1;
    foreach (var pair in relationship.links)
    {
        var dr = table.NewRow();
        dr[column_rank] = c;
        dr[column_nodeA] = pair.NodeA.name;
        dr[column_nodeB] = pair.NodeB.name;
        dr[column_weight] = pair.weight;
        c++;
        table.Rows.Add(dr);
    }

    table.AddExtra("Type of node A [" + typeof(TNodeA).Name + "]");
    table.AddExtra("Type of node B [" + typeof(TNodeB).Name + "]");
    return (table);
}
public DataTable GetDataTableFromCsv(string pathToCsvFile)
{
    var dataTable = new DataTable();
    using (var csvReader = new CsvReader(new StreamReader(pathToCsvFile)))
    {
        while (csvReader.Read())
        {
            var dr = new DataRow();
            for (int i = 0; i < csvReader.FieldHeaders.Length; i++)
            {
                dr.Add(csvReader.FieldHeaders[i], csvReader.GetField(i));
            }
            dataTable.Add(dr);
        }
    }
    return dataTable;
}
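// Hypothetical usage of the CSV loader above; the containing class name
// CsvLoader and the file path are illustrative, not from the source. Assumes
// a CsvReader that exposes headers via FieldHeaders, as in the snippet.
var loader = new CsvLoader();
DataTable table = loader.GetDataTableFromCsv("data/measurements.csv");
// Each DataRow now maps header name -> field value for one CSV record.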