static void Main(string[] args)
{
    string sourcepath = args[0];

    // There are a few static utility functions.
    Console.WriteLine("The given DAT file has {0} rows.", DelimitedFile.GetRowCount(sourcepath));
    Console.WriteLine("The first row: {0}", DelimitedFile.GetFirstRow(sourcepath));

    // Initialize the reader, given the file to read from and that this file has a header row.
    DelimitedFile file = new DelimitedFile(sourcepath, true);

    // Set delimiter values (these are the defaults).
    file.NewlineCharacter = 10;
    file.TextQualifierCharacter = 254;
    file.CarriageReturnCharacter = 13;
    file.ColumnDelimiterCharacter = 20;

    // Read in and handle each available line from the file.
    while (file.ReadRow())
    {
        if (file.CurrentRowNumber == 1)
        {
            Console.WriteLine("Column headers: {0}", string.Join(", ", file.CurrentRow.Columns));
            continue;
        }

        if (file.SetColumnValue("text", "example"))
        {
            Console.WriteLine(file.GetColumnValue("text"));
        }
    }
}
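// Note: the four delimiter bytes above match the common Concordance-style DAT
// conventions (ASCII 20 as the column delimiter; ASCII 254, the thorn 'þ', as
// the text qualifier). Since the snippet notes these are the library defaults,
// a minimal sketch of the same loop can omit the assignments; "sample.dat" and
// the "text" column below are hypothetical.
static void ReadDatSketch()
{
    DelimitedFile file = new DelimitedFile("sample.dat", true);
    while (file.ReadRow())
    {
        if (file.CurrentRowNumber > 1) // skip the header row
        {
            Console.WriteLine(file.GetColumnValue("text"));
        }
    }
}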
public void Test1()
{
    IList<DelimitedFileInfo> currentInventoryFileNames = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products.test.csv",
            "sku", "^\\w{4}\\d{8}")
    };

    DelimitedFile newInventoryFile = new DelimitedFile();
    foreach (DelimitedFileInfo file in currentInventoryFileNames)
    {
        newInventoryFile.LoadFileLines(file.FullName, null, null, null);
    }

    StringBuilder sb = new StringBuilder();
    foreach (DelimitedFileRow row in newInventoryFile)
    {
        foreach (DelimitedFileColumn column in row)
        {
            Console.WriteLine($"Name: {column.Name}, {column.Value}");
        }
    }

    Assert.Pass();
}
// This will calculate the errors.
// First it must ask what column to use as a label.
private void externalEvalCalculateButton_Click(object sender, EventArgs e)
{
    // Start by parsing the label file.
    DelimitedFile delimitedLabelFile = new DelimitedFile(externalEvalLabelText.Text);
    int labelCol = Prompt.ShowDialog("Enter the Column to use", "Select Attribute", 1, delimitedLabelFile.Data[0].Length);
    LabelList labels = new LabelList(delimitedLabelFile.GetColumn(labelCol - 1));

    // Get the Partition file.
    Partition clusterFile = new Partition(externalEvalClusterText.Text);
    int countOfPoints = clusterFile.DataCount;

    // Create a count mapping:
    // [actual cluster label, number in found clusters]
    int[,] clusterMatching = new int[labels.UniqueLabels.Count, clusterFile.Clusters.Count];
    foreach (Cluster c in clusterFile.Clusters)
    {
        foreach (ClusteredItem k in c.Points)
        {
            int actualMatching = labels.LabelIndices[k.Id];
            int foundMatching = k.ClusterId;
            clusterMatching[actualMatching, foundMatching]++;
        }
    }

    // One-to-one mapping like Darla's.
    String greedyError = ExternalEval.GreedyErrorEval(clusterFile, labels, clusterMatching);
    externalEvalResultText.Text = greedyError;
}
public LabeledPointSet(String filename, int labelIndex)
{
    DelimitedFile file = new DelimitedFile(filename);
    Labels = new LabelList(file, labelIndex);
    Points = new PointSet(file.RemoveColumns(labelIndex));
}
public static void readTestFile(DelimitedFile df)
{
    df.readFile();
    Console.WriteLine("Headers" + "\n");
    String[] headers = df.headersInOrder();
    Console.WriteLine(getHeaders(headers, df.delimiter));

    /*
    for (int i = 0; i < headers.Length; ++i)
    {
        String s = headers[i];
        if (i < headers.Length - 1)
            Console.WriteLine(s + df.delimiter);
        else
            Console.WriteLine(s);
    }
    */

    /*
    String[] fileContents = df.fileContentsInOrder();
    foreach (String s in fileContents)
    {
        Console.WriteLine(s);
    }
    */
}
/// <summary>
/// Accepts a file and parses it as a distance matrix
/// </summary>
/// <param name="filename">Path to the delimited matrix</param>
public DistanceMatrix(String filename)
    : base(DataType.DistanceMatrix)
{
    DelimitedFile parsedFile = new DelimitedFile(filename);

    // Parse each line.
    int dimensions = parsedFile.Data.Count;
    if (dimensions == 0)
    {
        throw new InvalidDataException("Empty matrix");
    }
    int numCols = parsedFile.Data[0].Length;
    if (numCols != dimensions)
    {
        throw new InvalidDataException("Non-Square Matrix");
    }

    Distances = new double[dimensions, dimensions];
    for (int r = 0; r < dimensions; r++)
    {
        for (int c = 0; c < dimensions; c++)
        {
            Distances[r, c] = double.Parse(parsedFile.Data[r][c]);
        }
    }
}
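// A sketch of the input this constructor expects: a square, numeric, delimited
// file with one matrix row per line. The path and values are hypothetical.
//
//   distances.csv:
//     0,1.5,2.0
//     1.5,0,0.7
//     2.0,0.7,0
//
// A non-square file would throw InvalidDataException("Non-Square Matrix").
DistanceMatrix matrix = new DistanceMatrix("distances.csv");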
public LabeledPointSet(String filename, LabelLocation labelLocation)
{
    DelimitedFile file = new DelimitedFile(filename);
    int labelIndex = 0;
    if (labelLocation == LabelLocation.LastColumn)
    {
        labelIndex = file.Data[0].Length - 1;
    }
    Labels = new LabelList(file, labelIndex);
    Points = new PointSet(file.RemoveColumns(labelIndex));
}
public static double RandIndex(String labelFile, String clusterFileName)
{
    // CALCULATING THE RAND INDEX
    // Start by parsing the label file; the label is taken from the last column.
    DelimitedFile delimitedLabelFile = new DelimitedFile(labelFile);
    int labelCol = delimitedLabelFile.Data[0].Length;
    LabelList labels = new LabelList(delimitedLabelFile.GetColumn(labelCol - 1));

    // Get the Partition file.
    Partition clusterFile = new Partition(clusterFileName);
    int[] assignments = new int[labels.LabelIndices.Length];
    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        for (int j = 0; j < clusterFile.Clusters[cluster].Points.Count; j++)
        {
            int clusterid = clusterFile.Clusters[cluster].Points[j].ClusterId;
            int id = clusterFile.Clusters[cluster].Points[j].Id;
            assignments[id] = clusterid;
        }
    }

    // Compare the two arrays, assignments and labels.LabelIndices.
    int a = 0;
    int b = 0;
    for (int i = 0; i < assignments.Length; i++)
    {
        for (int j = i + 1; j < assignments.Length; j++)
        {
            // Case a: i and j are in the same cluster in both assignments and LabelIndices.
            if (labels.LabelIndices[i] == labels.LabelIndices[j] && assignments[i] == assignments[j])
            {
                a++;
            }
            // Case b: i and j are in different clusters in both assignments and LabelIndices.
            else if (labels.LabelIndices[i] != labels.LabelIndices[j] && assignments[i] != assignments[j])
            {
                b++;
            }
        }
    }

    int denominator = assignments.Length * (assignments.Length - 1) / 2;
    double randIndex = (a + b) / (double)denominator;
    //return "Group A: " + a + " Group B: " + b + " RandIndex: " + randIndex;
    return randIndex;
}
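// For intuition: the Rand index counts the vertex pairs on which the clustering
// and the labels agree (grouped together in both, or separated in both) out of
// all n(n-1)/2 pairs, so it ranges from 0 to 1 and 1 means perfect agreement.
// A hypothetical call (the paths are placeholders):
double ri = RandIndex(@"C:\data\labels.csv", @"C:\data\partition.cluster");
Console.WriteLine("Rand index: " + ri);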
// Inputs and outputs the file without any alterations.
public static void test1()
{
    String fileName1R = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileR1.csv";
    String fileName1W = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileW1.csv";
    DelimitedFile df1 = new DelimitedFile(fileName1R, ",");
    Console.WriteLine("Reading File=" + fileName1R);
    readTestFile(df1);
    df1.fileName = fileName1W;
    writeTestFile(df1);
    Console.WriteLine(lastMsg);
    Console.ReadLine();
}
// This will calculate the errors.
// First it must ask what column to use as a label.
private void externalEvalCalculateButton_Click(object sender, EventArgs e)
{
    // Start by parsing the label file.
    DelimitedFile delimitedLabelFile = new DelimitedFile(externalEvalLabelText.Text);
    int labelCol = Prompt.ShowDialog("Enter the Column to use", "Select Attribute", 1, delimitedLabelFile.Data[0].Length);
    LabelList labels = new LabelList(delimitedLabelFile.GetColumn(labelCol - 1));

    // Get the Partition file.
    Partition clusterFile = new Partition(externalEvalClusterText.Text);

    // Calculate the error.
    ExternalEval error = new ExternalEval(clusterFile, labels);
    externalEvalResultText.Text = error.TextResults;
}
public PointSet(DelimitedFile parsedFile)
{
    PointList = new List<KPoint>();
    var numAttributes = parsedFile.Data[0].Length;
    foreach (var stringArray in parsedFile.Data)
    {
        if (stringArray.Length != numAttributes)
        {
            throw new InvalidDataException("Non-Constant number of attributes");
        }
        double[] points = stringArray.Select(double.Parse).ToArray();
        PointList.Add(new KPoint(points));
    }
    Dimensions = PointList[0].Dimensions;
}
/// <summary>
/// Creates an overlapping LabelList
/// </summary>
/// <param name="f">File holding the label columns; the first column of each row is skipped and every remaining column is treated as a label</param>
public LabelListOverlapping(DelimitedFile f)
{
    LabelIndices = new List<List<int>>();
    UniqueLabels = new List<string>();
    for (int i = 0; i < f.Data.Count; i++)
    {
        List<int> sublist = new List<int>();
        for (int j = 1; j < f.Data[i].Length; j++)
        {
            if (!UniqueLabels.Contains(f.Data[i][j]))
            {
                UniqueLabels.Add(f.Data[i][j]);
            }
            sublist.Add(UniqueLabels.IndexOf(f.Data[i][j]));
        }
        LabelIndices.Add(sublist);
    }
}
public static double Purity(String labelFile, String clusterFileName)
{
    // Start by parsing the label file; the label is taken from the last column.
    DelimitedFile delimitedLabelFile = new DelimitedFile(labelFile);
    int labelCol = delimitedLabelFile.Data[0].Length;
    LabelList labels = new LabelList(delimitedLabelFile.GetColumn(labelCol - 1));

    // Get the Partition file.
    Partition clusterFile = new Partition(clusterFileName);
    int[] majority = new int[clusterFile.Clusters.Count];
    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        int[] assignments = new int[labels.UniqueLabels.Count];
        for (int j = 0; j < clusterFile.Clusters[cluster].Points.Count; j++)
        {
            int clusterid = clusterFile.Clusters[cluster].Points[j].ClusterId;
            int id = clusterFile.Clusters[cluster].Points[j].Id;
            assignments[labels.LabelIndices[id]]++;
        }

        // Now find the max of assignments.
        int maxAssign = 0;
        for (int k = 0; k < assignments.Length; k++)
        {
            if (assignments[k] > maxAssign)
            {
                maxAssign = assignments[k];
            }
        }
        majority[cluster] = maxAssign;
    }

    // Add up majority[] and divide by the number of vertices.
    int total = 0;
    for (int i = 0; i < majority.Length; i++)
    {
        total += majority[i];
    }
    return (double)total / labels.LabelIndices.Length;
}
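// Purity gives each cluster its most common true label and reports the fraction
// of points that match, so it lies in (0, 1]; like RandIndex above, it reads the
// label from the last column of the label file. A hypothetical call:
double purity = Purity(@"C:\data\labels.csv", @"C:\data\partition.cluster");
Console.WriteLine("Purity: " + purity);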
private void button19_Click_1(object sender, EventArgs e)
{
    int somDim = (int)somWidth.Value;
    NetMining.Data.PointSet data = new PointSet(textBox4.Text);
    HexagonalSelfOrganizingMap hSOM = new HexagonalSelfOrganizingMap(data, somDim, 0.3);
    hSOM.runLargeEpochs(0.2, 1);
    hSOM.runLargeEpochs(0.05, 2);
    hSOM.runLargeEpochs(0.01, 2);
    //hSOM.runLargeEpochs(0.01, 4);
    //hSOM.runLargeEpochs(0.005, 6);

    // Set up our labels.
    DelimitedFile f = new DelimitedFile(SOMLabelTextbox.Text);
    int labelIndex = Prompt.ShowDialog("Select Label Index", "Label File", 1, f.Data[0].Length) - 1;
    String[] labels = f.GetColumn(labelIndex);

    // Now build our array of indices.
    List<String> labelNames = new List<string>();
    int[] labelIndexArr = new int[data.Count];
    for (int i = 0; i < data.Count; i++)
    {
        if (!labelNames.Contains(labels[i]))
        {
            labelNames.Add(labels[i]);
        }
        labelIndexArr[i] = labelNames.IndexOf(labels[i]);
    }

    var bmp = hSOM.GetUMatrix(10, labelNames.Count, labelIndexArr);
    bmp[0].Save("test" + somDim.ToString() + ".bmp");
    bmp[1].Save("count" + somDim.ToString() + ".bmp");
    for (int i = 2; i < bmp.Count; i++)
    {
        bmp[i].Save("count" + somDim.ToString() + "class_" + (i - 1).ToString() + ".bmp");
    }
    MessageBox.Show("Done!");
}
public object Parse(string value, DelimitedFile config)
{
    return TransactionType.GetTransactionType(value);
}
// Outputs the same file with a different delimiter.
public static void test2()
{
    String fileName2R = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileR1.csv";
    String fileName2W = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileW2.csv";
    DelimitedFile df2 = new DelimitedFile(fileName2R, ",");
    Console.WriteLine("Reading File=" + fileName2R);
    readTestFile(df2);
    df2.fileName = fileName2W;
    df2.delimiter = "|";
    writeTestFile(df2);
    Console.WriteLine(lastMsg);
    Console.ReadLine();
}
public MainWindow()
{
    InitializeComponent();

    IList<DelimitedFileInfo> latestProductFiles = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products.latest.csv",
            "sku", "^\\w{4}\\d{8}")
    };

    IList<DelimitedFileInfo> oldProductFiles = new List<DelimitedFileInfo>()
    {
        //new DelimitedFileInfo("C:\\Users\\pawil\\Downloads\\product.files.comparison\\products_export_1.csv",
        //    "Variant SKU", @"(^[0-9]{10,}[a-z0-9]{10,})"),
        //new DelimitedFileInfo("C:\\Users\\pawil\\Downloads\\product.files.comparison\\products_export_2.csv",
        //    "Variant SKU", @"(^[0-9]{10,}[a-z0-9]{10,})")
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products.old.csv",
            "sku", "^\\w{4}\\d{8}")
    };

    DelimitedFile oldProducts = new DelimitedFile();
    foreach (DelimitedFileInfo file in oldProductFiles)
    {
        oldProducts.LoadFileLines(file.FullName, HeaderNames, KeyName);
    }

    DelimitedFile latestProducts = new DelimitedFile();
    foreach (DelimitedFileInfo file in latestProductFiles)
    {
        latestProducts.LoadFileLines(file.FullName, HeaderNames, KeyName);
    }

    // Look for new products. These will be products
    // that exist in the new file but not the old file.
    IList<DelimitedFileRow> newProducts = DelimitedFile.Compare(latestProducts, oldProducts);
    if (newProducts.Count > 0)
    {
        DelimitedFile newProductsFile = new DelimitedFile();
        newProductsFile.AddLines(newProducts);
        newProductsFile.Save("", $"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\new.product.sku.csv");
    }

    // Look for discontinued products. These will be products
    // that exist in the old file but not the new file.
    IList<DelimitedFileRow> discontinuedProducts = DelimitedFile.Compare(oldProducts, latestProducts);
    if (discontinuedProducts.Count > 0)
    {
        DelimitedFile discontinuedProductsFile = new DelimitedFile();
        discontinuedProductsFile.AddLines(discontinuedProducts);
        discontinuedProductsFile.Save("", $"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\discontinued.product.sku.csv");
    }

    MessageBox.Show($"Completed Processing the Files and Found {newProducts.Count} new product SKUs and {discontinuedProducts.Count} discontinued product SKUs.");
    Application.Current.Shutdown();
}
public object Parse(string value, DelimitedFile config)
{
    return value;
}
public static void writeTestFile(DelimitedFile df)
{
    df.writeFile();
}
public object Parse(string value, DelimitedFile config)
{
    return DateTime.ParseExact(value, config.TimeStampFormat, CultureInfo.InvariantCulture);
}
public MainWindow()
{
    InitializeComponent();

    IList<DelimitedFileInfo> currentInventoryFileNames = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products_shopify_inventory.latest.csv",
            "sku", "^\\w{4}\\d{8}")
    };

    IList<DelimitedFileInfo> oldInventoryFileNames = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products_shopify_inventory.old.csv",
            "sku", "^\\w{4}\\d{8}")
    };

    DelimitedFile oldInventoryFile = new DelimitedFile();
    foreach (DelimitedFileInfo file in oldInventoryFileNames)
    {
        oldInventoryFile.LoadFileLines(file.FullName, HeaderNames, file.RegExLineIdentifierPattern, KeyName);
    }

    DelimitedFile newInventoryFile = new DelimitedFile();
    foreach (DelimitedFileInfo file in currentInventoryFileNames)
    {
        newInventoryFile.LoadFileLines(file.FullName, HeaderNames, file.RegExLineIdentifierPattern, KeyName);
    }

    IList<DelimitedFileRow> changedProductInventoryQts = DelimitedFile.Compare(oldInventoryFile, newInventoryFile);
    if (changedProductInventoryQts.Count > 0)
    {
        IList<DelimitedFileRow> inventoryRows = new List<DelimitedFileRow>();
        foreach (DelimitedFileRow data in changedProductInventoryQts)
        {
            DelimitedFileColumn keyColumn = data[KeyName];
            String keyNameValue = DelimitedFile.GetKey(keyColumn.Value.ToString());
            var oldCSVData = oldInventoryFile.GetRowByKeyName(keyNameValue); //.Query(csvData => csvData.GetColumnValue(KeyName) == data.GetColumnValue(KeyName)).FirstOrDefault();
            var newCSVData = newInventoryFile.GetRowByKeyName(keyNameValue); //.Query(csvData => csvData.GetColumnValue(KeyName) == data.GetColumnValue(KeyName)).FirstOrDefault();
            if (!(oldCSVData is null) && !(newCSVData is null))
            {
                DelimitedFileRow inventoryRow = new DelimitedFileRow();
                inventoryRow.CreateColumn(KeyName, data.GetColumnValue(KeyName), true);
                inventoryRow.CreateColumn("BEFORE", oldCSVData.GetColumnValue(QtyFieldName));
                inventoryRow.CreateColumn("AFTER", newCSVData.GetColumnValue(QtyFieldName));
                inventoryRows.Add(inventoryRow);
            }
        }

        DelimitedFile changedInventoryQtysFile = new DelimitedFile();
        changedInventoryQtysFile.AddLines(inventoryRows);
        changedInventoryQtysFile.Save("sku,before,after", $"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\changed.inventory.qtys.csv");
    }

    MessageBox.Show($"Completed Processing the Files and Found {changedProductInventoryQts.Count} product SKUs with changed inventory quantities.");
    Application.Current.Shutdown();
}
public object Parse(string value, DelimitedFile config)
{
    return Decimal.Parse(value);
}
public static String CheckForNoise(String labelFile, String clusterFileName)
{
    // Need to calculate ns, ms and cs, as described in Yang and Leskovec ICDM 2012.
    // Start by parsing the label file; the label is taken from the last column.
    DelimitedFile delimitedLabelFile = new DelimitedFile(labelFile);
    int labelCol = delimitedLabelFile.Data[0].Length;
    LabelList labels = new LabelList(delimitedLabelFile.GetColumn(labelCol - 1));

    // Get the Partition file.
    Partition clusterFile = new Partition(clusterFileName);
    int[] assignments = new int[labels.LabelIndices.Length];

    // Initialize the assignments array to -1.
    // Ultimately, nodes that have been removed as part of a critical attack set will stay at a -1 assignment.
    for (int i = 0; i < assignments.Length; i++)
    {
        assignments[i] = -1;
    }

    int noiseThreshold;
    //if (assignments.Length == 550) noiseThreshold = 500;
    //else if (assignments.Length == 770) noiseThreshold = 700;
    //else noiseThreshold = 1100;
    if (assignments.Length == 220)
    {
        noiseThreshold = 200;
    }
    else if (assignments.Length == 440)
    {
        noiseThreshold = 400;
    }
    else
    {
        noiseThreshold = 800;
    }

    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        for (int j = 0; j < clusterFile.Clusters[cluster].Points.Count; j++)
        {
            int clusterid = clusterFile.Clusters[cluster].Points[j].ClusterId;
            int id = clusterFile.Clusters[cluster].Points[j].Id;
            assignments[id] = clusterid;
        }
    }

    int[] ns = new int[clusterFile.Clusters.Count];
    int[] ms = new int[clusterFile.Clusters.Count];
    int[] cs = new int[clusterFile.Clusters.Count];
    Boolean[] isAllNoise = new Boolean[clusterFile.Clusters.Count];

    // If we're doing this without reassignment, we need new node and edge values.
    int edges = 0;
    int nodes = 0;
    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        ns[cluster] = clusterFile.Clusters[cluster].Points.Count;
        isAllNoise[cluster] = true;
        for (int j = 0; j < clusterFile.Clusters[cluster].Points.Count; j++) // for each vertex in this cluster
        {
            nodes++;
            if (clusterFile.Clusters[cluster].Points[j].Id < noiseThreshold)
            {
                isAllNoise[cluster] = false;
            }
            for (int k = 0; k < clusterFile.Graph.Nodes[clusterFile.Clusters[cluster].Points[j].Id].Edge.Length; k++) // for each edge k adjacent to j
            {
                edges++;
                int edge = clusterFile.Graph.Nodes[clusterFile.Clusters[cluster].Points[j].Id].Edge[k];
                if (cluster == assignments[edge])
                {
                    ms[cluster]++;
                    //if (cluster == 7) Console.WriteLine("ms " + edge);
                }
                else
                {
                    cs[cluster]++;
                    //if (cluster == 7) Console.WriteLine("cs " + edge);
                }
            }
        }
    }

    String report = "";
    double[] internalDensity = new double[clusterFile.Clusters.Count];
    double[] averageDegree = new double[clusterFile.Clusters.Count];
    double[] expansion = new double[clusterFile.Clusters.Count];
    double[] cutRatio = new double[clusterFile.Clusters.Count];
    double[] conductance = new double[clusterFile.Clusters.Count];
    double[] separability = new double[clusterFile.Clusters.Count];
    double WAinternalDensity = 0;
    double WAaverageDegree = 0;
    double WAexpansion = 0;
    double WAcutRatio = 0;
    double WAconductance = 0;
    double WAseparability = 0;

    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        double totalPossibleInternalEdges = ((ns[cluster] * (ns[cluster] - 1)) / 2);
        internalDensity[cluster] = totalPossibleInternalEdges == 0 ? 0 : (double)ms[cluster] / totalPossibleInternalEdges;
        averageDegree[cluster] = 2.0 * ms[cluster] / ns[cluster];
        expansion[cluster] = (double)cs[cluster] / ns[cluster];
        cutRatio[cluster] = (double)cs[cluster] / (ns[cluster] * (assignments.Length - ns[cluster]));
        conductance[cluster] = (double)cs[cluster] / (2 * ms[cluster] + cs[cluster]);
        separability[cluster] = (double)ms[cluster] / cs[cluster];
    }

    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        WAinternalDensity += internalDensity[cluster] * ns[cluster];
        WAaverageDegree += averageDegree[cluster] * ns[cluster];
        WAexpansion += expansion[cluster] * ns[cluster];
        WAcutRatio += cutRatio[cluster] * ns[cluster];
        WAconductance += conductance[cluster] * ns[cluster];
        WAseparability += separability[cluster] * ns[cluster];
    }
    WAinternalDensity /= (double)nodes;
    WAaverageDegree /= (double)nodes;
    WAexpansion /= (double)nodes;
    WAcutRatio /= (double)nodes;
    WAconductance /= (double)nodes;
    WAseparability /= (double)nodes;

    for (int cluster = 0; cluster < clusterFile.Clusters.Count; cluster++)
    {
        report += clusterFileName.Substring(clusterFileName.LastIndexOf('\\') + 1) +
            "," + cluster + "," + (isAllNoise[cluster] ? 1 : 0) +
            "," + ns[cluster] + "," + ms[cluster] + "," + cs[cluster] +
            "," + internalDensity[cluster] + "," + internalDensity.Min() + "," + WAinternalDensity + "," + internalDensity.Max() +
            //"," + averageDegree[cluster] + "," + averageDegree.Min() + "," + averageDegree.Average() + "," + averageDegree.Max() +
            "," + averageDegree[cluster] + "," + averageDegree.Min() + "," + WAaverageDegree + "," + averageDegree.Max() +
            "," + expansion[cluster] + "," + expansion.Min() + "," + WAexpansion + "," + expansion.Max() +
            "," + cutRatio[cluster] + "," + cutRatio.Min() + "," + WAcutRatio + "," + cutRatio.Max() +
            "," + conductance[cluster] + "," + conductance.Min() + "," + WAconductance + "," + conductance.Max() +
            "," + separability[cluster] + "," + separability.Min() + "," + WAseparability + "," + separability.Max() + "\n";
    }
    return report;
}
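// For reference, the per-cluster scores above are the community-quality measures
// from Yang and Leskovec (ICDM 2012): with ns a cluster's node count, ms its
// internal edge count, and cs its boundary edge count, internal density is
// ms / (ns*(ns-1)/2), average degree is 2*ms/ns, expansion is cs/ns, cut ratio
// is cs / (ns*(n-ns)), conductance is cs / (2*ms + cs), and separability is
// ms/cs. Note the adjacency loop visits each internal edge from both endpoints,
// so ms as computed here holds twice the single-counted internal edge total;
// the WA* values are cluster-size-weighted means of each score.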
/// <summary>
/// <see cref="IActivity.Run(ActivityRun,IGenevaActions)">IActivity.Run(ActivityRun,IGenevaActions)</see>
/// </summary>
/// <param name="activityRun">ActivityRun instance</param>
/// <param name="genevaInstance">GenevaAction instance</param>
public override void Run(ActivityRun activityRun, IGenevaActions genevaInstance)
{
    // This is where the actual activity run happens.

    // The names of parameters in the GS Activity.
    String portParamName = "Portfolio";
    String startDateName = "Period Start Date";
    String endDateName = "Period End Date";
    String rslParamName = "RSL";

    // FTP parameters.
    ftpHandler = new FTPHandler();
    String ftpServerNameParamName = "FTP Host Name";
    String ftpUserNameParamName = "FTP Username";
    String ftpPasswordParamName = "FTP Password";
    String localFileNameFTPFormatParamName;
    String hostKeyNameParamName;
    String sshKeyParamName;
    String remoteInputDirParamName;
    // This is the file name that the FTP server requires in order for it to process the file.
    String remoteFileNameParamName;
    String WinSCPPathParamName;

    try
    {
        activityRun.StartDateTime = DateTime.Now;

        // Read data from the activity parameters.
        String portfolio = activityRun.GetParameterValue(portParamName);
        String perStartDate = activityRun.GetParameterValue(startDateName);
        String perEndDate = activityRun.GetParameterValue(endDateName);
        rslName = activityRun.GetParameterValue(rslParamName);

        // Set up the FTPHandler.
        //ftpHandler.
        var cultInfo = new System.Globalization.CultureInfo(System.Globalization.CultureInfo.InvariantCulture.Name);
        var dateTimeStyle = System.Globalization.DateTimeStyles.None;
        DateTime dtStartDate = DateTime.ParseExact(perStartDate, "M/d/yyyy", cultInfo, dateTimeStyle);
        DateTime dtEndDate = DateTime.ParseExact(perEndDate, "M/d/yyyy", cultInfo, dateTimeStyle);

        ReportParameterList parameters = new ReportParameterList();
        parameters.Add(new ReportParameter("Portfolio", portfolio));
        parameters.Add(new ReportParameter("PeriodStartDate", dtStartDate));
        parameters.Add(new ReportParameter("PeriodEndDate", dtEndDate));

        runRSL(activityRun, genevaInstance, parameters);
        extractFile = new DelimitedFile(csvOutputFileName, ",");
        activityRun.UpdateSuccessfulActivityRun();
    }
    catch (Exception e)
    {
        activityRun.UpdateFailedActivityRun(e);
    }
    finally
    {
        // Set the activity end time and save it.
        activityRun.EndDateTime = DateTime.Now;
        activityRun.Save();
    }
}
/// <summary>
/// Creates a LabelList
/// </summary>
/// <param name="f">File holding the label column</param>
/// <param name="col">0-Based index of the label column</param>
public LabelList(DelimitedFile f, int col)
    : this(f.GetColumn(col))
{
}
public static LightWeightGraph GetGraphFromFile(String file)
{
    DelimitedFile parsedFile = new DelimitedFile(file, false, true);
    int parsedFileRowCount = parsedFile.Data.Count;

    // Ensure the file has the weighted/unweighted header plus at least one node row.
    if (parsedFileRowCount < 2)
    {
        throw new InvalidDataException("file");
    }
    String weightString = parsedFile.Data[0][0];
    bool isWeighted = true;
    if (weightString == "unweighted")
    {
        isWeighted = false;
    }
    else if (weightString != "weighted")
    {
        throw new InvalidDataException("Invalid Weight Type");
    }

    // Start parsing the file.
    List<List<NodeWeightPair>> nodes = new List<List<NodeWeightPair>>();
    for (int i = 1; i < parsedFileRowCount; i++)
    {
        var row = parsedFile.Data[i];
        List<NodeWeightPair> nList = new List<NodeWeightPair>();
        nodes.Add(nList);

        int edgeSize = (isWeighted) ? 2 : 1;
        for (int j = 1; j < row.Length; j += edgeSize)
        {
            int from = int.Parse(row[j]);
            double weight = (isWeighted) ? double.Parse(row[j + 1]) : 1.0f;
            nList.Add(new NodeWeightPair { Node = from, Weight = weight });
        }
    }

    // Construct the graph.
    var lwn = new LightWeightNode[nodes.Count];
    List<int>[] edges = new List<int>[nodes.Count];
    List<double>[] edgeWeights = new List<double>[nodes.Count];
    for (int i = 0; i < nodes.Count; i++)
    {
        edges[i] = new List<int>();
        edgeWeights[i] = new List<double>();
    }
    for (int i = 0; i < lwn.Length; i++)
    {
        int count = nodes[i].Count;
        for (int j = 0; j < count; j++)
        {
            var a = nodes[i][j];
            edges[i].Add(a.Node);
            edgeWeights[i].Add(a.Weight);
        }
    }
    for (int i = 0; i < lwn.Length; i++)
    {
        lwn[i] = new LightWeightNode(i, true, edges[i], edgeWeights[i]);
    }
    return new LightWeightGraph(lwn, isWeighted);
}
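// Reading the parser back into a file format: the first cell of the first row
// must be "weighted" or "unweighted", and each later row describes one node,
// with column 0 skipped (presumably the node's own id) followed by neighbor
// ids, or by neighbor/weight pairs in the weighted case. A hypothetical input:
//
//   graph.csv:
//     weighted
//     0,1,0.5,2,1.25    (node 0 -> node 1 with weight 0.5, node 2 with 1.25)
//     1,0,0.5
//     2,0,1.25
//
LightWeightGraph g = GetGraphFromFile("graph.csv");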
// Outputs file with different column headings.
public static void test3()
{
    String fileName3R = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileR1.csv";
    String fileName3W = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileW3.csv";
    DelimitedFile df3 = new DelimitedFile(fileName3R, ",");
    Console.WriteLine("Reading File=" + fileName3R);
    readTestFile(df3);

    String headerFileNameBefore = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileH3B.csv";
    File.WriteAllLines(headerFileNameBefore, df3.headersInOrder());

    df3.replaceColName("Ticker", "Bloomberg Ticker");
    df3.replaceColName(@"Long/Short", "LS");
    df3.replaceColName("Investment", "Security");

    String headerFileNameAfter = @"C:\WFMCustomAccelerator\PB Accelerator Files\Test Files\TestFileH3A.csv";
    File.WriteAllLines(headerFileNameAfter, df3.headersInOrder());
    //Console.WriteLine( getHeaders(df3.headersInOrder(), df3.delimiter) );

    df3.fileName = fileName3W;
    writeTestFile(df3);
    Console.WriteLine(lastMsg);
    Console.ReadLine();
}
public void Generate()
{
    IDictionary<String, String> mapper = new Dictionary<string, string>()
    {
        { "upc", "gtin" },
        { "brand", "brand" }
    };

    IList<DelimitedFileInfo> ProductFiles = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\products.csv",
            Name, "^\\w{4}\\d{8}")
    };

    IList<DelimitedFileInfo> GoogleFeedFiles = new List<DelimitedFileInfo>()
    {
        new DelimitedFileInfo($"C:\\Users\\{userprofile}\\Downloads\\products.tsv", KeyName, "")
    };

    DelimitedFile ProductsFile = new DelimitedFile();
    foreach (DelimitedFileInfo file in ProductFiles)
    {
        ProductsFile.LoadFileLines(file.FullName, ProductHeaderNames, file.RegExLineIdentifierPattern, Name);
    }

    DelimitedFile GoogleFeedFile = new DelimitedFile();
    foreach (DelimitedFileInfo file in GoogleFeedFiles)
    {
        GoogleFeedFile.LoadFileLines(file.FullName, GoogleHeaderNames, file.RegExLineIdentifierPattern, KeyName, '\t');
    }

    Console.WriteLine("Missing Google Feed Titles from Products File.");
    foreach (String key in GoogleFeedFile.KeyNameToRowIndex.Keys)
    {
        if (ProductsFile.KeyNameToRowIndex.ContainsKey(key) == false)
        {
            Console.WriteLine($"{key}");
        }
    }
    Console.WriteLine("_____________________________________________________________________________________");

    #region Process the rows in the product file, extracting Brand and UPC data to add to the existing Google Feed rows.
    int googleFeedProductRowsProcessed = 0;
    IList<DelimitedFileRow> googleContentFeedFileRows = new List<DelimitedFileRow>();
    foreach (DelimitedFileRow googleFeedFileRow in GoogleFeedFile)
    {
        DelimitedFileColumn keyColumn = googleFeedFileRow[KeyName];
        String keyNameValue = DelimitedFile.GetKey(keyColumn.Value.ToString());
        DelimitedFileRow productRow = ProductsFile.GetRowByKeyName(keyNameValue); //Query(x => x.GetColumnValue(Name) == keyColumn.Value);
        if (!(productRow is null))
        {
            DelimitedFileRow currentProductFileRow = productRow;
            googleFeedFileRow[Brand] = currentProductFileRow[Brand];
            googleFeedFileRow.CreateColumn(GTIN, currentProductFileRow[UPC].Value.Value);
            googleContentFeedFileRows.Add(googleFeedFileRow);
            googleFeedProductRowsProcessed++;
        }
    }
    #endregion

    #region Extract the headers from the first row.
    var columns = googleContentFeedFileRows.First().Where(c => c.Name != null);
    StringBuilder headers = new StringBuilder();
    int columnIndex = 0;
    foreach (DelimitedFileColumn column in columns)
    {
        if (columnIndex == 0)
        {
            headers.Append(column.Name);
        }
        else
        {
            headers.Append(",").Append(column.Name);
        }
        columnIndex++;
    }
    #endregion

    #region Create new files for the Google Feed data.
    // Add the updated rows to new files, in batches of 3,500 rows, and save.
    IList<DelimitedFile> filesToSave = new List<DelimitedFile>();
    IList<DelimitedFileRow> rows = new List<DelimitedFileRow>();
    int lastLineNumber = 0;
    foreach (DelimitedFileRow row in googleContentFeedFileRows)
    {
        if (lastLineNumber > 0 && lastLineNumber % 3500 == 0)
        {
            DelimitedFile updatedGoogleFeedFile = new DelimitedFile();
            filesToSave.Add(updatedGoogleFeedFile);
            updatedGoogleFeedFile.AddLines(rows);
            lastLineNumber = 0;
            rows.Clear();
        }
        // Always add the current row so no record is dropped at a batch boundary.
        rows.Add(row);
        lastLineNumber++;
    }

    // Add any remaining rows to a file.
    if (rows.Count > 0)
    {
        DelimitedFile updatedGoogleFeedFile = new DelimitedFile();
        filesToSave.Add(updatedGoogleFeedFile);
        updatedGoogleFeedFile.AddLines(rows);
        lastLineNumber = 0;
        rows.Clear();
    }
    #endregion

    #region Persist all files.
    int fileIdx = 0;
    foreach (DelimitedFile file in filesToSave)
    {
        file.Save(headers.ToString(), $"C:\\Users\\{userprofile}\\Downloads\\product.files.comparison\\google.feed.file_{fileIdx}.csv");
        fileIdx++;
    }
    #endregion

    Console.WriteLine($"Completed Processing the Product Files updating {googleFeedProductRowsProcessed} records in the Google Content Feed.");
    Console.ReadLine();
}