public BusinessEntities.FileParseResult Parse(string FilePath)
{
    // Parses the XML file at FilePath; currently only the <transmissions>
    // element's attributes are collected. Returns a FileParseResult whose
    // type defaults to ValidFileClaim.
    FileParseResult _fileParseResult = new FileParseResult();
    _fileParseResult.FileParseResultType = FileParseResultType.ValidFileClaim;
    //Logger.LogMessage(PREPROCESS_FILEPARSER.TRACE_MSG, "****WorkComp.Net : Pre Processor -> Start parsing valid AT XML File *****");

    XmlReaderSettings settings = new XmlReaderSettings();
    settings.ConformanceLevel = ConformanceLevel.Fragment;

    using (XmlReader reader = XmlReader.Create(FilePath, settings))
    {
        try
        {
            string tranmissionsattr = string.Empty;
            while (reader.Read())
            {
                // Culture-safe, allocation-free alternative to Name.ToLower() == "...".
                if (reader.NodeType == XmlNodeType.Element &&
                    string.Equals(reader.Name, "transmissions", StringComparison.OrdinalIgnoreCase))
                {
                    for (int i = 0; i < reader.AttributeCount; i++)
                    {
                        reader.MoveToAttribute(i);
                        // Once positioned on the attribute, reader.Value is its value.
                        tranmissionsattr = tranmissionsattr + " " + reader.Name + "=\"" + reader.Value + "\"";
                    }
                    reader.MoveToElement();
                }
            }
        }
        catch (XmlException)
        {
            // Malformed XML: best-effort - fall through with the default result.
            // NOTE(review): the original swallowed ALL exceptions silently; non-XML
            // failures (e.g. I/O errors) now propagate to the caller - confirm desired.
        }
    }
    return _fileParseResult;
}
private void ValidateModelNames(FileParseResult parseResult)
{
    // Ensures every type referenced by model properties and controller methods is
    // either one of the built-in supported types or a custom model declared in the
    // parsed file. Throws ArgumentException for the first unknown type found.

    // HashSet lookup replaces the original O(n^2) List.Contains scan.
    var supportedTypes = new HashSet <string>(_supportedTypes);
    foreach (var model in parseResult.Models)
    {
        supportedTypes.Add(model.Name);
    }

    var usedTypes = new List <string>();
    foreach (var model in parseResult.Models)
    {
        // Property keys are the property type names.
        usedTypes.AddRange(model.Properties.Select(x => x.Key));
    }
    foreach (var controller in parseResult.Controllers)
    {
        foreach (var method in controller.Methods)
        {
            usedTypes.Add(method.ReturnedType);
            // NOTE(review): assumes BodyParameter is always non-null - confirm
            // the parser guarantees this for body-less methods.
            usedTypes.Add(method.Parameters.BodyParameter.Key);
            usedTypes.AddRange(method.Parameters.QueryParameters.Select(x => x.Key));
        }
    }

    foreach (var type in usedTypes)
    {
        if (!supportedTypes.Contains(type))
        {
            throw new ArgumentException($"Incorrect type: {type}!");
        }
    }
}
public void Execute(GeneratorExecutionContext context)
{
    // Source-generator entry point: parses the additional files, validates all
    // model/type references, then emits one generated source file per controller
    // and per model.
    Debug.WriteLine("Generator Execute");

    FileParseResult parseResult = ParseFiles(context);
    ValidateModelNames(parseResult);

    foreach (var controller in parseResult.Controllers)
    {
        var name = controller.Name;
        // Ordinal: a code-identifier suffix check must not be culture-sensitive (CA1310).
        if (!name.EndsWith("Controller", StringComparison.Ordinal))
        {
            name += "Controller";
        }
        string content = GenerateController(controller);
        context.AddSource($"{name}.cs", content);
    }

    foreach (var model in parseResult.Models)
    {
        string content = GenerateModel(model);
        string name = model.Name;
        context.AddSource($"{name}.cs", content);
    }
}
// Filters a FileParseResult so that only the at-or-below-median-frequency hash
// entries remain, then prunes the Distances map against the surviving entries.
// Returns null for null input; returns the input unchanged when it has fewer
// than `treshold` hash entries. (sic: "treshold" is misspelled in the API.)
public static FileParseResult FilterMedian(this FileParseResult input, int treshold)
{
    if (input == null)
    {
        return(null);
    }
    // Small inputs are passed through untouched.
    if (input.Hash.Count < treshold)
    {
        return(input);
    }
    // Sort (key, frequency) pairs by ascending frequency so the median can be taken.
    List <KeyValuePair <int, int> > sortedFrequency = input.Frequency.ToList();
    sortedFrequency.Sort(
        delegate(KeyValuePair <int, int> pair1, KeyValuePair <int, int> pair2)
        {
            return(pair1.Value.CompareTo(pair2.Value));
        }
        );
    int median = sortedFrequency.Select(f => f.Value).ToList().Median();
    // Hash and Distances are SHARED with the input (not copied); only Frequency
    // is rebuilt, so the pruning below also mutates input.Distances.
    FileParseResult output = new FileParseResult {
        Hash = input.Hash, Frequency = new Dictionary <int, int>(), Distances = input.Distances
    };
    // Keep entries whose frequency is at or below the median; when the median is
    // below the threshold, keep everything.
    for (int i = 0; i < sortedFrequency.Count; i++)
    {
        if (median < treshold || sortedFrequency[i].Value <= median)
        {
            output.Frequency.Add(sortedFrequency[i].Key, sortedFrequency[i].Value);
        }
    }
    // NOTE(review): the branches below look inverted. `!ContainsKey(k)` followed by
    // `Remove(k)` of that same absent key is a no-op, and the inner `Remove` only
    // ever has an effect when i == j (removing an element NOT contained is also a
    // no-op). Confirm the intended pruning semantics before touching this logic.
    foreach (int i in output.Frequency.Keys)
    {
        if (!output.Distances.ContainsKey(output.Frequency[i]))
        {
            output.Distances.Remove(output.Frequency[i]);
        }
        else
        {
            foreach (int j in output.Frequency.Keys)
            {
                if (!output.Distances[output.Frequency[i]].Contains(output.Frequency[j]) || i == j)
                {
                    output.Distances[output.Frequency[i]].Remove(output.Frequency[j]);
                }
            }
        }
    }
    return(output);
}
private void IQP_PublishFITSDataInvoke(FileParseResult FileResObj)
{
    // Publishes one processed FITS file's metrics: appends a grid row, bumps the
    // running statistics, and refreshes the short-form FWHM / sky-background boxes.
    // UI-thread only (marshalled via IQP_PublishFITSData).
    // (The commented-out duplicate of IQP_PublishFITSData that preceded this
    // method was dead code and has been removed; the live copy exists elsewhere.)

    //Grid block
    int curRowIndex = dataGridFileData.Rows.Add();
    var cells = dataGridFileData.Rows[curRowIndex].Cells;
    cells["dataGridData_filename"].Value = Path.GetFileName(FileResObj.FITSFileName);
    cells["dataGridData_Bg"].Value = FileResObj.QualityData.SkyBackground.ToString("P", CultureInfo.InvariantCulture);
    cells["dataGridData_AspectRatio"].Value = FileResObj.QualityData.AspectRatio.ToString("N3", CultureInfo.InvariantCulture);
    cells["dataGridData_Stars"].Value = FileResObj.QualityData.StarsNumber.ToString("N0", CultureInfo.InvariantCulture);
    cells["dataGridData_Alt"].Value = FileResObj.HeaderData.ObjAlt.ToString("N0", CultureInfo.InvariantCulture);
    cells["dataGridData_Exp"].Value = FileResObj.HeaderData.ImageExposure.ToString("N0", CultureInfo.InvariantCulture);

    DateTime DateObsUTC = DateTime.SpecifyKind(FileResObj.HeaderData.DateObsUTC, DateTimeKind.Utc); //set it to UTC
    cells["dataGridData_DateTime"].Value = DateObsUTC.ToLocalTime().ToString("yyyy-MM-dd HH:mm:ss");

    prevFWHM = curFWHM;
    curFWHM = FileResObj.FWHM;
    cells["dataGridData_FWHM"].Value = curFWHM.ToString("N2", CultureInfo.InvariantCulture);

    IQP_statImagesProcessed++;
    IQP_UpdateStatistics(); // on every invoke

    //publish to short form: current FWHM plus signed delta from the previous frame
    var deltaFWHM = curFWHM - prevFWHM;
    txtShort_IQP_LastFWHM.Text = curFWHM.ToString("N2", CultureInfo.InvariantCulture)
        + "(" + (deltaFWHM > 0 ? "+" : "") + deltaFWHM.ToString("N1", CultureInfo.InvariantCulture) + ")";
    if (curFWHM > 5)
    {
        txtShort_IQP_LastFWHM.BackColor = OffColor;
    }
    else if (curFWHM > prevFWHM)
    {
        txtShort_IQP_LastFWHM.BackColor = InterColor;
    }
    else
    {
        txtShort_IQP_LastFWHM.BackColor = DefBackColorTextBoxes;
    }

    txtShort_IQPbg.Text = FileResObj.QualityData.SkyBackground.ToString("P", CultureInfo.InvariantCulture);
    // BUGFIX: the original tested "> 0.05" before "> 0.1", so the 10%+ (OffColor)
    // branch was unreachable; check the larger threshold first.
    // NOTE(review): these branches recolor txtShort_IQP_LastFWHM, clobbering the
    // FWHM coloring set above - txtShort_IQPbg was likely intended; confirm before
    // changing the target control.
    if (FileResObj.QualityData.SkyBackground > 0.1)
    {
        txtShort_IQP_LastFWHM.BackColor = OffColor;
    }
    else if (FileResObj.QualityData.SkyBackground > 0.05)
    {
        txtShort_IQP_LastFWHM.BackColor = InterColor;
    }
    else
    {
        txtShort_IQP_LastFWHM.BackColor = DefBackColorTextBoxes;
    }
}
public override object VisitInstructions([NotNull] GrammarParser.InstructionsContext context)
{
    // Visits the top-level instruction list, collecting controller and model
    // declarations into a FileParseResult. Duplicate names and unknown
    // instruction keywords raise a GrammarException.
    // (The original wrapped the loop in `try { ... } catch { throw; }` - a
    // no-op rethrow - which has been removed.)
    var result = new FileParseResult();
    if (context == null)
    {
        return result;
    }

    foreach (var child in context.children)
    {
        // The first child token of each instruction is its keyword.
        var instructionType = child.GetChild(0).GetText();
        switch (instructionType)
        {
        case "cntrl":
            var controller = (ControllerModel)Visit(child);
            ShouldBeUnique("Controller", controller.Name, result.Controllers.Select(x => x.Name), child);
            result.Controllers.Add(controller);
            break;

        case "model":
            var model = (EntityModel)Visit(child);
            ShouldBeUnique("Model", model.Name, result.Models.Select(x => x.Name), child);
            result.Models.Add(model);
            break;

        default:
            // BUGFIX: corrected "isntruction" typo in the error message.
            throw new GrammarException($"Can not parse instruction {instructionType}")
                  {
                      FromLine = child.SourceInterval.a, ToLine = child.SourceInterval.b
                  };
        }
    }
    return result;
}
protected virtual FileParseResult ParseFiles(GeneratorExecutionContext context)
{
    // Aggregates the parse results of every additional file carrying the
    // expected extension into a single FileParseResult.
    var result = new FileParseResult();
    foreach (var file in context.AdditionalFiles
             // OrdinalIgnoreCase: file extensions are conventionally case-insensitive
             // (the original ordinal == silently skipped e.g. ".PROTO").
             .Where(x => string.Equals(Path.GetExtension(x.Path), _extention, StringComparison.OrdinalIgnoreCase)))
    {
        // GetText() can return null (e.g. unreadable file); skip instead of
        // passing null into the parser.
        var text = file.GetText()?.ToString();
        if (text == null)
        {
            continue;
        }
        var fileParse = _mainParser.ParseProtoFile(text);
        result.Controllers.AddRange(fileParse.Controllers);
        result.Models.AddRange(fileParse.Models);
    }
    return result;
}
private static FileParseResult ParseFile(string content)
{
    // Splits a generated source file into header, documented member sections and
    // footer. Members are introduced by "/// <summary>" XML-doc markers and the
    // member region is terminated by a "// END MEMBERS" sentinel.
    const string Delimiter = "/// <summary>";

    // ExplicitCapture keeps the unnamed groups out of Regex.Split's output, so the
    // delimiters themselves are not returned as sections.
    var sections = Regex.Split(content, $"({Delimiter})|(// END MEMBERS)", RegexOptions.ExplicitCapture);

    // A file that opens directly with "partial class" has a one-section header;
    // otherwise the header spans the first two sections.
    var headerCount = sections[0].Contains("partial class") ? 1 : 2;

    // Everything between the header and the final (footer) section is a member.
    var memberSections = sections.Skip(headerCount).Take(sections.Length - headerCount - 1);

    return new FileParseResult
    {
        Header = string.Join(Delimiter, sections.Take(headerCount)),
        Footer = sections[sections.Length - 1],
        // Re-prefix the delimiter that Split consumed before parsing each member.
        Members = memberSections.Select(section => ParseMember(Delimiter + section)).ToList(),
    };
}
public FileRating Analyse(string fileName, FileParseResult input, IEnumerable <string> words)
{
    // Rates a file against a word list: records which words occur in the file's
    // hash table and scores each found word by how many distance sets reference
    // its hash. Returns null when none of the words occur.

    // BUGFIX: materialize once - the original enumerated the `words` parameter
    // five+ times and re-ran the deferred `hases` query on every use.
    var wordList = words.ToList();
    var occurrences = wordList.Where(s => input.Hash.ContainsValue(s)).ToList();

    FileRating result = new FileRating
    {
        FileName = fileName,
        Occurances = occurrences, // (sic) property name is misspelled in the model
        Missing = wordList.Where(s => !input.Hash.ContainsValue(s)).ToList(),
        MinDistances = new int[occurrences.Count],
        Rating = 0
    };
    if (result.Occurances.Count == 0)
    {
        return null;
    }

    // Map each word to its hash key; 0 marks "not found".
    // NOTE(review): a genuine hash key of 0 would be indistinguishable from
    // "not found" and get skipped below - confirm 0 is never a valid key.
    var hashes = wordList
        .Select(w => input.Hash.Any(h => string.Equals(h.Value, w))
            ? input.Hash.First(h => string.Equals(h.Value, w)).Key
            : 0)
        .ToList();
    var distances = input.Distances.Where(d => hashes.Any(h => h == d.Key)).ToList();

    int index = 0;
    foreach (var hash in hashes)
    {
        if (hash == 0)
        {
            continue;
        }
        // Score = number of distance sets that reference this hash.
        int rating = 0;
        foreach (var distance in distances)
        {
            if (distance.Value.Contains(hash))
            {
                rating++;
            }
        }
        result.MinDistances[index++] = rating;
    }
    result.Rating = result.Occurances.Count + result.MinDistances.Sum();
    return result;
}
static void DisplayResult(FileParseResult result)
{
    // Prints the parse summary: the max-sum lines and/or the invalid lines,
    // or a placeholder when there is nothing at all to report.
    if (!result.AnyValidLine && !result.AnyInvalidLine)
    {
        Console.WriteLine("No results to display.");
        return;
    }
    if (result.AnyValidLine)
    {
        Console.WriteLine($"Lines with max sum of elements: {result.LinesWithMaxSum.ToCommaSeparatedString()}");
    }
    if (result.AnyInvalidLine)
    {
        Console.WriteLine($"Invalid lines: {result.InvalidLines.ToCommaSeparatedString()}");
    }
}
public BusinessEntities.FileParseResult Parse(string FilePath)
{
    // Parses a (currently hard-coded sample) <transmissions> XML document and
    // collects the root element's attributes.
    // NOTE(review): FilePath is ignored - the method parses an embedded test
    // document instead; confirm whether this stub should read the actual file.
    FileParseResult _fileParseResult = new FileParseResult();
    _fileParseResult.FileParseResultType = FileParseResultType.ValidFileClaim;
    //Logger.LogMessage(PREPROCESS_FILEPARSER.TRACE_MSG, "****WorkComp.Net : Pre Processor -> Start parsing valid AT XML File *****");

    XmlReaderSettings settings = new XmlReaderSettings();
    settings.ConformanceLevel = ConformanceLevel.Fragment;

    string xml = "<?xml version=\"1.0\" ?><transmissions version=\"1.0\" xmlns=\"workcomp.net/wcn2/public\"><transmission><header></header><reports><report><R4>FL</R4><R5></R5>JCN<R15>ClaimTest1</R15></report></reports><trailer recordcount=\"1\"/></transmission></transmissions>";
    StringReader sreader = new StringReader(xml);
    // BUGFIX: removed the leftover debug dump `Console.WriteLine(xml.Substring(195, 10))`
    // (magic offsets; would throw if the sample document were shortened).

    using (XmlReader reader = XmlReader.Create(sreader, settings))
    {
        try
        {
            string tranmissionsattr = string.Empty;
            while (reader.Read())
            {
                // Culture-safe, allocation-free alternative to Name.ToLower() == "...".
                if (reader.NodeType == XmlNodeType.Element &&
                    string.Equals(reader.Name, "transmissions", StringComparison.OrdinalIgnoreCase))
                {
                    for (int i = 0; i < reader.AttributeCount; i++)
                    {
                        reader.MoveToAttribute(i);
                        tranmissionsattr = tranmissionsattr + " " + reader.Name + "=\"" + reader.GetAttribute(i) + "\"";
                    }
                    reader.MoveToElement();
                }
            }
        }
        catch (XmlException)
        {
            // Malformed XML: best-effort - fall through with the default result.
            // NOTE(review): the original caught every exception and only stored
            // e.Message into an unused local; other failures now propagate.
        }
    }
    return _fileParseResult;
}
public async Task ProcessCsvFile_Succeed()
{
    // Verifies the CSV happy path: the parsed transactions are validated, mapped,
    // persisted, and the manager reports both transactions as processed.

    // Arrange
    _fileParserFactoryMock.Setup(factory => factory.GetParser(".csv"))
        .Returns(_csvFileParserMock.Object);

    // BUGFIX: DateTime.Parse("20/02/2019 ...") depends on the current culture and
    // throws FormatException under MM/dd cultures (e.g. en-US); use explicit
    // DateTime constructor values so the test is deterministic everywhere.
    var fakeFileParseResult = new FileParseResult
    {
        IsSucceed = true,
        TransactionList = new List <Transaction>
        {
            new Transaction
            {
                Amount = 1000M,
                Currency = "USD",
                Date = new DateTime(2019, 2, 20, 12, 33, 16),
                Status = TransactionStatusEnum.A,
                TransactionId = "Invoice0000001"
            },
            new Transaction
            {
                Amount = 300M,
                Currency = "USD",
                Date = new DateTime(2019, 2, 21, 2, 4, 59),
                Status = TransactionStatusEnum.R,
                TransactionId = "Invoice0000002"
            },
        }
    };
    var fakeEfContextTransactions = new List <EfContext.Transaction>
    {
        new EfContext.Transaction
        {
            Amount = 1000M,
            Currency = "USD",
            Date = new DateTime(2019, 2, 20, 12, 33, 16),
            Status = 1,
            TransactionId = "Invoice0000001"
        },
        new EfContext.Transaction
        {
            Amount = 300M,
            Currency = "USD",
            Date = new DateTime(2019, 2, 21, 2, 4, 59),
            Status = 2,
            TransactionId = "Invoice0000002"
        }
    };
    var fakeValidationResult = new ValidationResult { IsSucceed = true };
    _transactionValidatorMock.Setup(validator => validator.ValidateTransactions(fakeFileParseResult.TransactionList))
        .Returns(fakeValidationResult);
    _transactionRepositoryMock.Setup(repo => repo.InsertListAsync(fakeEfContextTransactions));
    _automapperMock.Setup(mapper => mapper.Map <List <EfContext.Transaction> >(fakeFileParseResult.TransactionList))
        .Returns(fakeEfContextTransactions);

    var fileContent = "Invoice0000001, \"1,000.00\", USD, 20/02/2019 12:33:16, Approved\n" +
                      "Invoice0000002, 300.00, USD, 21/02/2019 02:04:59, Failed";
    _csvFileParserMock.Setup(parser => parser.ParseFile(fileContent))
        .Returns(fakeFileParseResult);

    // Act
    var parseResult = await _fileManager.ProcessFileAsync(fileContent, ".csv");

    // Assert
    Assert.IsTrue(parseResult.IsSucceed);
    Assert.IsTrue(string.IsNullOrEmpty(parseResult.ErrorMessage));
    Assert.AreEqual(2, parseResult.ProcessedTransactions);
}
public async Task ProcessXmlFile_Succeed()
{
    // Verifies the XML happy path: the parsed transactions are validated, mapped,
    // persisted, and the manager reports both transactions as processed.
    // NOTE(review): the ".xml" factory setup returns _csvFileParserMock - the shared
    // mock doubles for both formats here; confirm a dedicated XML parser mock is
    // not intended.

    // Arrange
    _fileParserFactoryMock.Setup(factory => factory.GetParser(".xml"))
        .Returns(_csvFileParserMock.Object);

    // BUGFIX: DateTime.Parse("20/02/2019 ...") depends on the current culture and
    // throws FormatException under MM/dd cultures (e.g. en-US); use explicit
    // DateTime constructor values so the test is deterministic everywhere.
    var fakeFileParseResult = new FileParseResult
    {
        IsSucceed = true,
        TransactionList = new List <Transaction>
        {
            new Transaction
            {
                Amount = 1000M,
                Currency = "USD",
                Date = new DateTime(2019, 2, 20, 12, 33, 16),
                Status = TransactionStatusEnum.A,
                TransactionId = "Invoice0000001"
            },
            new Transaction
            {
                Amount = 300M,
                Currency = "USD",
                Date = new DateTime(2019, 2, 21, 2, 4, 59),
                Status = TransactionStatusEnum.R,
                TransactionId = "Invoice0000002"
            },
        }
    };
    var fakeEfContextTransactions = new List <EfContext.Transaction>
    {
        new EfContext.Transaction
        {
            Amount = 1000M,
            Currency = "USD",
            Date = new DateTime(2019, 2, 20, 12, 33, 16),
            Status = 1,
            TransactionId = "Invoice0000001"
        },
        new EfContext.Transaction
        {
            Amount = 300M,
            Currency = "USD",
            Date = new DateTime(2019, 2, 21, 2, 4, 59),
            Status = 2,
            TransactionId = "Invoice0000002"
        }
    };
    var fakeValidationResult = new ValidationResult { IsSucceed = true };
    _transactionValidatorMock.Setup(validator => validator.ValidateTransactions(fakeFileParseResult.TransactionList))
        .Returns(fakeValidationResult);
    _transactionRepositoryMock.Setup(repo => repo.InsertListAsync(fakeEfContextTransactions));
    _automapperMock.Setup(mapper => mapper.Map <List <EfContext.Transaction> >(fakeFileParseResult.TransactionList))
        .Returns(fakeEfContextTransactions);

    // (dropped the pointless `$` prefix: the string has no interpolation holes)
    var fileContent = "<Transactions>" +
                      "<Transaction id=\"Inv00001\">" +
                      "<TransactionDate>2019-01-23T13:45:10</TransactionDate>" +
                      "<PaymentDetails>" +
                      "<Amount>200.00</Amount>" +
                      "<CurrencyCode>USD</CurrencyCode>" +
                      "</PaymentDetails>" +
                      "<Status>Done</Status>" +
                      "</Transaction>" +
                      "<Transaction id=\"Inv00002\">" +
                      "<TransactionDate>2019-01-24T16:09:15</TransactionDate>" +
                      "<PaymentDetails>" +
                      "<Amount>10000.00</Amount>" +
                      "<CurrencyCode>EUR</CurrencyCode>" +
                      "</PaymentDetails>" +
                      "<Status>Rejected</Status>" +
                      "</Transaction>" +
                      "</Transactions>";
    _csvFileParserMock.Setup(parser => parser.ParseFile(fileContent))
        .Returns(fakeFileParseResult);

    // Act
    var parseResult = await _fileManager.ProcessFileAsync(fileContent, ".xml");

    // Assert
    Assert.IsTrue(parseResult.IsSucceed);
    Assert.IsTrue(string.IsNullOrEmpty(parseResult.ErrorMessage));
    Assert.AreEqual(2, parseResult.ProcessedTransactions);
}
public void IQP_PublishFITSData(FileParseResult FileResObj)
{
    // Thread-safe publish entry point: marshals the grid/statistics update onto
    // the UI thread that owns this form.
    Action publish = () => this.IQP_PublishFITSDataInvoke(FileResObj);
    this.Invoke(publish);
}