/// <summary>
/// Carves up a mime-type and returns a ParseResults object.
/// </summary>
/// <param name="mimeType">A mime-type string such as "application/xhtml;q=0.5".</param>
/// <returns>A ParseResults holding the type, sub-type and parameter dictionary.</returns>
/// <remarks>For example, the media range 'application/xhtml;q=0.5' would get parsed into:
/// ('application', 'xhtml', {'q', '0.5'})</remarks>
public static ParseResults ParseMimeType(String mimeType)
{
    String[] parts = mimeType.Split(';');
    ParseResults results = new ParseResults
    {
        Parameters = new Dictionary<String, String>()
    };

    // Everything after the first ';' is a "key=value" parameter; malformed
    // segments (no '=' or more than one) are silently ignored.
    for (int i = 1; i < parts.Length; i++)
    {
        String[] subParts = parts[i].Split('=');
        if (subParts.Length == 2)
        {
            results.Parameters[subParts[0].Trim()] = subParts[1].Trim();
        }
    }

    String fullType = parts[0].Trim();

    // Java URLConnection class sends an Accept header that includes a
    // single "*" - Turn it into a legal wildcard.
    if (fullType.Equals("*"))
    {
        fullType = "*/*";
    }

    String[] types = fullType.Split('/');
    results.Type = types[0].Trim();
    // BUG FIX: input with no '/' (e.g. "text") previously threw
    // IndexOutOfRangeException here; treat a missing sub-type as a wildcard.
    results.SubType = types.Length > 1 ? types[1].Trim() : "*";
    return results;
}
/// <summary>
/// Find the best match for a given mimeType against a list of media_ranges
/// that have already been parsed by MimeParse.parseMediaRange().
/// </summary>
/// <param name="mimeType">The mime-type to score.</param>
/// <param name="parsedRanges">Media ranges already parsed into ParseResults.</param>
/// <returns>
/// A tuple of the fitness value and the value of the 'q' quality parameter of
/// the best match, or (-1, 0) if no match was found.
/// </returns>
public static FitnessAndQuality FitnessAndQualityParsed(String mimeType, ICollection<ParseResults> parsedRanges)
{
    int bestFitness = -1;
    float bestFitQ = 0;
    ParseResults target = ParseMediaRange(mimeType);

    foreach (ParseResults range in parsedRanges)
    {
        bool typeMatches = target.Type.Equals(range.Type) || range.Type.Equals("*") || target.Type.Equals("*");
        bool subTypeMatches = target.SubType.Equals(range.SubType) || range.SubType.Equals("*") || target.SubType.Equals("*");
        if (!typeMatches || !subTypeMatches)
        {
            continue;
        }

        // BUG FIX: previously paramMatches was declared/reset and the fitness
        // computed INSIDE the per-parameter loop, so (a) a target with no
        // parameters was never scored at all, and (b) parameter matches never
        // accumulated across parameters. Count matches first, score once.
        int paramMatches = 0;
        foreach (String k in target.Parameters.Keys)
        {
            if (!k.Equals("q") && range.Parameters.ContainsKey(k) &&
                target.Parameters[k].Equals(range.Parameters[k]))
            {
                paramMatches++;
            }
        }

        int fitness = range.Type.Equals(target.Type) ? 100 : 0;
        fitness += range.SubType.Equals(target.SubType) ? 10 : 0;
        fitness += paramMatches;

        if (fitness > bestFitness)
        {
            bestFitness = fitness;
            // BUG FIX: parse 'q' with the invariant culture so "0.5" is not
            // misread (or rejected) in cultures using ',' as decimal separator.
            bestFitQ = range.Parameters.ContainsKey("q")
                ? float.Parse(range.Parameters["q"], System.Globalization.CultureInfo.InvariantCulture)
                : 0;
        }
    }

    return new FitnessAndQuality(bestFitness, bestFitQ);
}
/// <summary>
/// Reads the CSV payload from <paramref name="input"/>, converts every data row
/// to a customer record line, and writes those lines to <paramref name="output"/>
/// as big-endian Unicode. Any failure is captured in the result's ErrorMessage.
/// </summary>
public ParseResults Parse(Stream input, Stream output)
{
    var result = new ParseResults();
    try
    {
        result.FileSize = input.Length;
        result.FileType = FileType;

        using (var reader = new CsvParser(new StreamReader(input, InputEncoding)))
        using (var writer = new StreamWriter(output, Encoding.BigEndianUnicode))
        {
            // First row is the header - discard it before processing data rows.
            reader.Read();

            for (var row = reader.Read(); row != null; row = reader.Read())
            {
                result.RowsInFile += 1;
                writer.WriteLine(Format.CustomerToString(GetCustomer(row)));
            }
        }

        result.Status = ParseStatus.Success;
    }
    catch (Exception ex)
    {
        result.ErrorMessage = ErrorMessage.FromException(ex);
    }
    return result;
}
/// <summary>
/// Builds Member records by diffing each user in the last stats file against the
/// same-named user in the first file. Users absent from the first file are treated
/// as new: zero starting totals, with all their points/units counted as gained.
/// </summary>
private Member[] GetMembers(ParseResults firstFileResults, ParseResults lastFileResults)
{
    loggingService.LogMethodInvoked();

    var members = new List<Member>(lastFileResults.UsersData.Count());

    foreach (UserData current in lastFileResults.UsersData)
    {
        UserData baseline = firstFileResults.UsersData.FirstOrDefault(user => user.Name == current.Name);

        if (baseline is null)
        {
            // New user this period - no baseline, full totals count as the delta.
            members.Add(new Member(current.Name, current.FriendlyName, current.BitcoinAddress,
                current.TeamNumber, 0, 0, current.TotalPoints, current.TotalWorkUnits));
        }
        else
        {
            members.Add(new Member(current.Name, current.FriendlyName, current.BitcoinAddress,
                current.TeamNumber, baseline.TotalPoints, baseline.TotalWorkUnits,
                current.TotalPoints - baseline.TotalPoints,
                current.TotalWorkUnits - baseline.TotalWorkUnits));
        }
    }

    loggingService.LogMethodFinished();
    return members.ToArray();
}
/// <summary>
/// Handles an HTTP file upload: parses the posted file into a bulk-insert file,
/// runs the bulk insert when parsing succeeded, and writes a JSON summary of
/// both stages to the response.
/// </summary>
private void HandleUploadingFile(HttpContext context)
{
    var parseResult = new ParseResults();
    var bulkInsertResult = new BulkInsertResults();
    UploadResults result = null;
    string tempFileName = null;
    try
    {
        tempFileName = Helper.GenerateTempFileName();
        using (var output = new FileStream(tempFileName, FileMode.CreateNew))
        {
            parseResult = Import.GenerateBulkInsertFile(context.Request.Files[0].InputStream, output);
        }
        if (parseResult.Status == ParseStatus.Success)
        {
            bulkInsertResult = Import.BulkInsert(tempFileName, parseResult.FileType);
        }
        result = new UploadResults(parseResult, bulkInsertResult);
        result.FileName = context.Request.Files[0].FileName;
    }
    catch (Exception ex)
    {
        if (result == null)
        {
            result = new UploadResults(parseResult, bulkInsertResult);
        }
        result.SetCustomErrorMessage(ErrorMessage.FromException(ex));
    }
    finally
    {
        // BUG FIX: the temp file was only deleted on the success path and
        // leaked whenever parsing or bulk insert threw. Best-effort cleanup.
        if (tempFileName != null)
        {
            try { File.Delete(tempFileName); }
            catch (IOException) { /* temp file cleanup is best-effort */ }
        }
    }

    var response = JsonConvert.SerializeObject(new
    {
        status = (int)result.Status,
        message = result.ErrorMessage,
        fileName = result.FileName,
        parse = new
        {
            type = (int)result.ParseResults.FileType,
            size = result.ParseResults.FileSize,
            rows = result.ParseResults.RowsInFile,
            message = result.ParseResults.ErrorMessage,
            status = (int)result.ParseResults.Status
        },
        bulkInsert = new
        {
            affected = result.BulkInsertResults.RowsAffected,
            message = result.BulkInsertResults.ErrorMessage,
            status = (int)result.BulkInsertResults.Status
        }
    });

    // BUG FIX: set the content type BEFORE writing the body; setting headers
    // after Write is unreliable once the response has started streaming.
    context.Response.ContentType = "application/json";
    context.Response.Write(response);
}
/// <summary>
/// Parses a CSV transaction file: validates the header, builds a Transaction per
/// valid row, saves them via the transaction service, and reports how many rows
/// were imported and which lines were ignored.
/// </summary>
/// <param name="filename">Path of the CSV file to parse.</param>
/// <returns>Ok/Warning with counts, or Error when the file itself is unusable.</returns>
public ParseResults ParseFile(string filename)
{
    var linesIgnored = new List<string>();
    var transactions = new List<Transaction>();
    try
    {
        var fileContent = File.ReadAllLines(filename);
        var headerRow = fileContent.FirstOrDefault();
        if (string.IsNullOrEmpty(headerRow))
        {
            throw new Exception("Invalid Header Row");
        }

        // PERF/CLARITY FIX: resolve the column positions once; the previous code
        // rebuilt cols.ToList() for every single IndexOf lookup.
        var cols = headerRow.Split(',').ToList();
        var accountCol = cols.IndexOf("Account");
        var descriptionCol = cols.IndexOf("Description");
        var currencyCodeCol = cols.IndexOf("Currency Code");
        var amountCol = cols.IndexOf("Amount");
        if (accountCol == -1 || descriptionCol == -1 || currencyCodeCol == -1 || amountCol == -1)
        {
            throw new Exception("Invalid column headers");
        }

        foreach (var csvRow in fileContent.Skip(1).Where(csvRow => !string.IsNullOrEmpty(csvRow)))
        {
            try
            {
                var values = csvRow.Split(',');
                var tran = new Transaction(new TransactionDto(null, values[accountCol],
                    values[descriptionCol], values[currencyCodeCol],
                    decimal.Parse(values[amountCol])));
                if (tran.IsValid() && _currencyService.IsValidCurrency(tran.CurrencyCode))
                {
                    transactions.Add(tran);
                }
                else
                {
                    linesIgnored.Add(csvRow);
                }
            }
            catch
            {
                // Any malformed row (missing columns, bad amount) is ignored,
                // not fatal - it is reported back via the Warning result.
                linesIgnored.Add(csvRow);
            }
        }

        foreach (var transaction in transactions)
        {
            _service.SaveTransaction(transaction);
        }
    }
    catch (Exception ex)
    {
        return ParseResults.Error(ex.Message);
    }

    return linesIgnored.Any()
        ? ParseResults.Warning(transactions.Count, linesIgnored)
        : ParseResults.Ok(transactions.Count);
}
/// <summary>
/// Validates a downloaded stats file by decompressing it, reading it into the
/// payload, parsing it, and stamping the payload with the parsed download time.
/// </summary>
public ParseResults ValidateFile(FilePayload filePayload)
{
    fileCompressionService.DecompressFile(
        filePayload.DownloadFilePath,
        filePayload.DecompressedDownloadFilePath);
    fileReaderService.ReadFile(filePayload);

    ParseResults parseResults = statsFileParserService.Parse(filePayload);

    // Propagate the timestamp recorded in the stats file onto the payload.
    filePayload.FileUtcDateTime = parseResults.DownloadDateTime;

    return parseResults;
}
public CommandResults Parse(string command, IStringReader reader) { CommandContext contextBuilder = new CommandContext(reader.GetCursor()); ParseResults parseResults = ParseNodes(Root, reader, contextBuilder); if (parseResults.Reader.CanRead()) { if (parseResults.Errors.Count > 0) { return(new CommandResults(false, parseResults.Errors[^ 1], parseResults.Context.Results));
/// <summary>
/// Decorator over the inner parser: when the one-hundred-users filter is
/// enabled, caps the parsed user list at the first 100 entries.
/// </summary>
public ParseResults Parse(FilePayload filePayload)
{
    ParseResults inner = innerService.Parse(filePayload);

    if (!settingsService.IsOneHundredUsersFilterEnabled())
    {
        return inner;
    }

    return new ParseResults(inner.DownloadDateTime, inner.UsersData.Take(100), inner.FailedUsersData);
}
/// <summary>
/// Test fixture setup: registers a plain command, a redirecting command and a
/// forking command, then pre-parses one input string for each shape.
/// </summary>
public void setup()
{
    dispatcher = new CommandDispatcher<object>();

    dispatcher.Register(builder => builder.Literal("command").Executes(ctx => 0));
    dispatcher.Register(builder => builder.Literal("redirect").Redirect(dispatcher.GetRoot()));
    dispatcher.Register(builder => builder.Literal("fork").Fork(
        dispatcher.GetRoot(),
        source => new List<object> { new object(), new object(), new object() }));

    simple = dispatcher.Parse("command", new object());
    singleRedirect = dispatcher.Parse("redirect command", new object());
    forkedRedirect = dispatcher.Parse("fork command", new object());
}
/// <summary>
/// When the filter is disabled, the decorator must return the inner service's
/// result object untouched.
/// </summary>
public void Parse_WhenDisabled_DoesNotModifyResults()
{
    // Arrange
    settingsMock.Enabled.Returns(false);
    var passthrough = new ParseResults(downloadDateTime, null, null);
    innerServiceMock.Parse(FilePayload).Returns(passthrough);

    // Act
    ParseResults returned = systemUnderTest.Parse(FilePayload);

    // Assert
    Assert.That(returned, Is.EqualTo(passthrough));
}
/// <summary>
/// Parses the given template file and, on success, installs it as the current
/// record template.
/// </summary>
/// <param name="filename">Path of the template file to load.</param>
/// <returns>True when the template parsed and was installed; false on failure.</returns>
public bool LoadTemplate(string filename)
{
    ParseResults pr = ParseTemplate(filename);
    if (pr.success == false)
    {
        Warning(this, pr.Error);
        // BUG FIX: previously this method returned true and overwrote
        // RecordTemplate with a null record even when parsing failed. Keep the
        // existing template intact and report the failure to the caller.
        return false;
    }
    RecordTemplate = pr.record;
    TemplateFileName = filename;
    return true;
}
/// <summary>
/// Decorator over the inner parser: when enabled, removes every user who has
/// earned no points at all.
/// </summary>
public ParseResults Parse(FilePayload filePayload)
{
    ParseResults inner = innerService.Parse(filePayload);

    if (!settings.Enabled)
    {
        return inner;
    }

    var withPoints = inner.UsersData.Where(data => data.TotalPoints > 0);
    return new ParseResults(inner.DownloadDateTime, withPoints, inner.FailedUsersData);
}
/// <summary>
/// Decorator over the inner parser: when enabled, removes every user whose
/// name is null, empty or whitespace-only.
/// </summary>
public ParseResults Parse(FilePayload filePayload)
{
    ParseResults inner = innerService.Parse(filePayload);

    if (!settings.Enabled)
    {
        return inner;
    }

    var named = inner.UsersData.Where(data => !string.IsNullOrWhiteSpace(data.Name));
    return new ParseResults(inner.DownloadDateTime, named, inner.FailedUsersData);
}
/// <summary>
/// When the filter is enabled, no zero-point users may survive the parse.
/// </summary>
public void Parse_WhenInvoked_FiltersResults()
{
    // Arrange: one zero-point user and one user with points.
    settingsMock.Enabled.Returns(true);
    var usersData = new[] { new UserData(), new UserData(0, null, 1, 0, 0) };
    var failedUsersData = new[] { new FailedUserData() };
    innerServiceMock.Parse(FilePayload).Returns(
        new ParseResults(downloadDateTime, usersData, failedUsersData));

    // Act
    ParseResults filtered = systemUnderTest.Parse(FilePayload);

    // Assert
    Assert.That(filtered.UsersData.Count(data => data.TotalPoints == 0), Is.EqualTo(0));
}
/// <summary>
/// Verifies that AddTransactions obtains a parser for the CSV stream, parses
/// the file, maps the parsed models to entities, and adds each entity to the
/// repository exactly once.
/// </summary>
public async Task AddTransactionsShouldBeHandled()
{
    // Arrange: a single CSV row plus the matching expected model/entity pair.
    const string csvString = @"“Invoice777”,”600.00”,”EUR”,”10/17/2019 02:04:59”, “Done”";
    var csvDataStream = new MemoryStream(Encoding.UTF8.GetBytes(csvString));
    // BUG FIX: the literal "10/17/2019 ..." is month-first, but the previous
    // pattern "dd/MM/yyyy hh:mm:ss" treats 17 as the month, so ParseExact threw
    // FormatException before the test body ever ran.
    const string csvDatePattern = "MM/dd/yyyy hh:mm:ss";
    var publicId = "Invoice777";
    var code = "EUR";
    var status = "D";
    var amount = 600;
    var date = DateTimeOffset.ParseExact("10/17/2019 02:04:59", csvDatePattern, CultureInfo.InvariantCulture);
    var entity = new Transaction { PublicId = publicId, Amount = amount, Date = date, Code = code, Status = status };
    var model = new TransactionCreateModel { PublicId = publicId, Amount = amount, Date = date, Code = code, Status = status };
    var entities = new List<Transaction> { entity };
    var models = new List<TransactionCreateModel> { model };
    var parseResults = new ParseResults(models);

    _transactionsParserProviderMock.Setup(x => x.GetParser(csvDataStream)).ReturnsAsync(_transactionsDataParser.Object);
    _transactionsDataParser.Setup(x => x.CanParseData(csvDataStream)).ReturnsAsync(true);
    _transactionsDataParser.Setup(x => x.ParseAllFile()).ReturnsAsync(parseResults);
    _mapperMock.Setup(x => x.Map<IEnumerable<Transaction>>(models)).Returns(entities);
    _transactionRepositoryMock.Setup(x => x.Add(entity));

    // Act
    await _transactionsService.AddTransactions(csvDataStream);

    // Assert: every collaborator was invoked exactly once.
    _transactionsParserProviderMock.Verify(x => x.GetParser(csvDataStream), Times.Once);
    _transactionsDataParser.Verify(x => x.CanParseData(csvDataStream), Times.Once);
    _transactionsDataParser.Verify(x => x.ParseAllFile(), Times.Once);
    _mapperMock.Verify(x => x.Map<IEnumerable<Transaction>>(models), Times.Once);
    _transactionRepositoryMock.Verify(x => x.Add(entity), Times.Once);
}
/// <summary>
/// Downloads the previously validated file into a fresh payload (configured
/// from API settings) and re-validates it, returning the parse results.
/// </summary>
private async Task<ParseResults> GetValidatedFile(ValidatedFile validatedFile)
{
    loggingService.LogMethodInvoked();

    var payload = new FilePayload();
    filePayloadApiSettingsService.SetFilePayloadApiSettings(payload);
    await dataStoreService.DownloadFile(payload, validatedFile);
    ParseResults parseResults = fileValidationService.ValidateFile(payload);

    loggingService.LogMethodFinished();
    return parseResults;
}
/// <summary> Given a source string and an index into that string where a .Net
/// method begins (i.e. the location of the @ character), parse the name
/// of the .Net method and the list of parameters. Returns a new ParseResults
/// instance if both components were found, otherwise returns null.
///
/// For example, if this string were passed: '@random() @strip("(801) 323-1131")',
/// and the <i>position</i> parameter were 11, this function would return
/// a new ParseResults with Position set to 32, MethodName set to 'strip',
/// and Parameters having a single parameter of "(801) 323-1131".
/// </summary>
public static ParseResults parse(string src, int position)
{
    var results = new ParseResults();
    var argBuffer = new StringBuilder();
    bool insideQuotes = false;

    // Scan character by character from just past the '@' marker.
    for (int idx = position + 1; idx < src.Length; idx++)
    {
        char c = src[idx];
        switch (c)
        {
            case '"':
                // Quote characters toggle quoting and are never copied out.
                insideQuotes = !insideQuotes;
                break;
            case '(' when !insideQuotes:
                // Everything between the '@' and this '(' is the method name.
                results.MethodName = src.Substring(position + 1, idx - position - 1);
                argBuffer.Length = 0;
                break;
            case ')' when !insideQuotes:
                // Closing paren outside quotes ends the call; split the args.
                results.Parameters = new MyStringTokenizer(argBuffer.ToString(), ',');
                results.Position = idx + 1;
                return results;
            default:
                // Parens inside quotes fall through to here and are kept literally.
                argBuffer.Append(c);
                break;
        }
    }

    // Ran off the end without a closing ')': not a complete method call.
    return null;
}
/// <summary>
/// Decorator over the inner parser: when enabled, removes every user whose
/// name starts with "google" (case-insensitive). Users with a null name are kept.
/// </summary>
public ParseResults Parse(FilePayload filePayload)
{
    ParseResults inner = innerService.Parse(filePayload);

    if (!settings.Enabled)
    {
        return inner;
    }

    var nonGoogle = inner.UsersData.Where(
        data => data.Name == null ||
                !data.Name.StartsWith("google", StringComparison.OrdinalIgnoreCase));
    return new ParseResults(inner.DownloadDateTime, nonGoogle, inner.FailedUsersData);
}
/// <summary>
/// When the filter is enabled, only users with a non-blank bitcoin address
/// survive the parse.
/// </summary>
public void Parse_WhenInvoked_FiltersResults()
{
    // Arrange: one user without and one with a bitcoin address.
    settingsMock.Enabled.Returns(true);
    var usersData = new[] { new UserData(), new UserData { BitcoinAddress = "addy" } };
    innerServiceMock.Parse(FilePayload).Returns(
        new ParseResults(downloadDateTime, usersData, new[] { new FailedUserData() }));

    // Act
    ParseResults filtered = systemUnderTest.Parse(FilePayload);

    // Assert
    Assert.That(filtered.UsersData.Count(), Is.EqualTo(1));
    Assert.That(filtered.UsersData.Count(data => string.IsNullOrWhiteSpace(data.BitcoinAddress)), Is.EqualTo(0));
}
/// <summary>
/// Carves up a media range and returns a ParseResults.
/// </summary>
/// <remarks>
/// For example, the media range 'application/*;q=0.5' would get parsed into:
/// ('application', '*', {'q', '0.5'})
/// In addition this function also guarantees that there is a value for 'q'
/// in the params dictionary, filling it in with a proper default if
/// necessary.
/// </remarks>
public static ParseResults ParseMediaRange(String range)
{
    ParseResults results = ParseMimeType(range);

    if (!results.Parameters.ContainsKey("q") || String.IsNullOrEmpty(results.Parameters["q"]))
    {
        results.Parameters["q"] = "1";
    }

    // BUG FIX: parse 'q' with the invariant culture ("0.5" must not be misread
    // in comma-decimal cultures), and fall back to the default instead of
    // throwing FormatException when the value is not a number at all.
    String q = results.Parameters["q"];
    float f;
    if (!float.TryParse(q, System.Globalization.NumberStyles.Float,
                        System.Globalization.CultureInfo.InvariantCulture, out f) ||
        f < 0 || f > 1)
    {
        results.Parameters["q"] = "1";
    }

    return results;
}
/// <summary>
/// Loads the HBM mapping files and C# sources referenced by a csproj, parses
/// the sources, and links every entity in the mapping set to the parsed class
/// declared by its NHibernate mapping.
/// </summary>
private void GetProject(ArchAngel.Providers.EntityModel.Model.MappingLayer.MappingSet mappingSet, XmlDocument csprojDocument, string filename)
{
    // Load the NHibernate mapping (HBM) files referenced by the project.
    var hbmFiles = ProjectLoader.GetHBMFilesFromCSProj(new CSProjFile(csprojDocument, filename), fileController);
    var mappings = hbmFiles.Select(f => MappingFiles.Version_2_2.Utility.Open(f)).ToList();

    // Parse the C# source files so mapped classes can be located.
    var csharpFiles = ProjectLoader.GetCSharpFilesFromCSProj(new CSProjFile(csprojDocument, filename), fileController);
    ParseResults parseResults = ParseResults.ParseCSharpFiles(csharpFiles);

    // Index entities by name, then wire each HBM class to its parsed C# class.
    var entities = mappingSet.EntitySet.Entities.ToDictionary(e => e.Name);
    foreach (var hm in mappings)
    {
        foreach (var hClass in hm.Classes())
        {
            var fullClassName = HibernateMappingHelper.ResolveFullClassName(hClass, hm);
            var shortClassName = HibernateMappingHelper.ResolveShortClassName(hClass, hm);

            Entity entity;
            if (!entities.TryGetValue(shortClassName, out entity))
            {
                // No matching entity for this mapped class; nothing to wire up.
                continue;
            }

            entity.MappedClass = parseResults.FindClass(
                fullClassName, entity.Properties.Select(p => p.Name).ToList());
        }
    }
}
/// <summary>
/// Test fixture setup: builds a payload and canned parse results, substitutes
/// the three collaborating services, and constructs the system under test.
/// </summary>
public void SetUp()
{
    filePayloadMock = new FilePayload
    {
        DownloadFilePath = "DownloadFilePath",
        DecompressedDownloadFilePath = "DecompressedDownloadFilePath"
    };
    parseResultsMock = new ParseResults(DateTime.Today, null, null);

    fileCompressionServiceMock = Substitute.For<IFileCompressionService>();
    fileReaderServiceMock = Substitute.For<IFileReaderService>();
    statsFileParserServiceMock = Substitute.For<IStatsFileParserService>();
    statsFileParserServiceMock.Parse(filePayloadMock).Returns(parseResultsMock);

    systemUnderTest = new FileValidationProvider(
        fileCompressionServiceMock, fileReaderServiceMock, statsFileParserServiceMock);
}
/// <summary>
/// Collects the distinct teams appearing in the last stats file, preserving the
/// order in which each team number is first seen.
/// </summary>
private Team[] GetTeams(ParseResults lastFileResults)
{
    loggingService.LogMethodInvoked();

    var teams = new List<Team>();
    // PERF FIX: track seen team numbers in a HashSet; the previous
    // teams.Any(...) scan per user made this loop O(n^2).
    var seenTeamNumbers = new HashSet<long>();

    foreach (UserData userData in lastFileResults.UsersData)
    {
        long teamNumber = userData.TeamNumber;
        if (!seenTeamNumbers.Add(teamNumber))
        {
            continue;
        }

        // TODO: Get the team name
        teams.Add(new Team(teamNumber, ""));
    }

    loggingService.LogMethodFinished();
    return teams.ToArray();
}
/// <summary>
/// Computes per-user gains between the first and last stats files. A user is
/// matched by (Name, TeamNumber); users absent from the first file have their
/// full totals counted as gained. Negative gains indicate corrupt state and
/// abort the distribution.
/// </summary>
/// <exception cref="InvalidDistributionState">
/// Thrown when a matched user shows negative points or work-unit gains.
/// </exception>
private FoldingUser[] GetFoldingUsers(ParseResults firstFileResults, ParseResults lastFileResults)
{
    loggingService.LogMethodInvoked();

    // PERF FIX: index the first file's users by (Name, TeamNumber) once instead
    // of scanning the whole collection per user (previously O(n^2)). The first
    // occurrence wins, matching the old FirstOrDefault semantics.
    var firstByKey = new Dictionary<Tuple<string, long>, UserData>();
    foreach (UserData user in firstFileResults.UsersData)
    {
        var key = Tuple.Create(user.Name, user.TeamNumber);
        if (!firstByKey.ContainsKey(key))
        {
            firstByKey.Add(key, user);
        }
    }

    int length = lastFileResults.UsersData.Count();
    var foldingUsers = new List<FoldingUser>(length);

    foreach (UserData userData in lastFileResults.UsersData)
    {
        UserData previous;
        if (firstByKey.TryGetValue(Tuple.Create(userData.Name, userData.TeamNumber), out previous))
        {
            var user = new FoldingUser(userData.FriendlyName, userData.BitcoinAddress,
                userData.TotalPoints - previous.TotalPoints,
                userData.TotalWorkUnits - previous.TotalWorkUnits);

            if (user.PointsGained < 0)
            {
                throw new InvalidDistributionState(
                    "Negative points earned was detected for a user. There may be an issue with the database state or the stat files download. Contact development");
            }

            if (user.WorkUnitsGained < 0)
            {
                throw new InvalidDistributionState(
                    "Negative work units earned was detected for a user. There may be an issue with the database state or the stat files download. Contact development");
            }

            foldingUsers.Add(user);
        }
        else
        {
            // New user this period: everything they have counts as gained.
            foldingUsers.Add(new FoldingUser(userData.FriendlyName, userData.BitcoinAddress,
                userData.TotalPoints, userData.TotalWorkUnits));
        }
    }

    loggingService.LogMethodFinished();
    return foldingUsers.ToArray();
}
/// <summary>
/// When the filter is enabled, every user whose name starts with "google"
/// (any casing) is removed; other users, including nameless ones, survive.
/// </summary>
public void Parse_WhenInvoked_FiltersResults()
{
    // Arrange: two keepers plus four google-prefixed variants.
    settingsMock.Enabled.Returns(true);
    var usersData = new[]
    {
        new UserData(), new UserData(0, "user", 0, 0, 0),
        new UserData(0, "GOOGLE", 0, 0, 0), new UserData(0, "Google", 0, 0, 0),
        new UserData(0, "google", 0, 0, 0), new UserData(0, "google123456", 0, 0, 0)
    };
    innerServiceMock.Parse(FilePayload).Returns(
        new ParseResults(downloadDateTime, usersData, new[] { new FailedUserData() }));

    // Act
    ParseResults filtered = systemUnderTest.Parse(FilePayload);

    // Assert
    Assert.That(filtered.UsersData.Count(), Is.EqualTo(2));
    Assert.That(
        filtered.UsersData.Count(data => data.Name?.StartsWith("google", StringComparison.OrdinalIgnoreCase) ?? false),
        Is.EqualTo(0));
}
/// <summary>
/// Aggregates the outcome of the parse stage and the bulk-insert stage of a
/// single upload.
/// </summary>
/// <param name="parseResults">Result of parsing the uploaded file.</param>
/// <param name="bulkInsertResults">Result of the subsequent bulk insert.</param>
public UploadResults(ParseResults parseResults, BulkInsertResults bulkInsertResults)
{
    BulkInsertResults = bulkInsertResults;
    ParseResults = parseResults;
}
/// <summary>
/// Loads an entity model from a Visual Studio project: gathers HBM mappings
/// (including ones generated from FluentNH), parses the C# sources, builds the
/// mapping set, creates references for unmapped database relationships, and
/// returns everything bundled in a LoadResult.
/// </summary>
/// <param name="csprojFilePath">Path to the .csproj file.</param>
/// <param name="nhConfigFile">NHibernate config, or null when none is available.</param>
public LoadResult LoadEntityModelFromCSProj(string csprojFilePath, NHConfigFile nhConfigFile)
{
    _progress.SetCurrentState("Loading Entities From Visual Studio Project", ProgressState.Normal);

    EntityLoader entityLoader = new EntityLoader(new FileController());
    XmlDocument doc = new XmlDocument();
    doc.LoadXml(fileController.ReadAllText(csprojFilePath));
    CSProjFile csProjFile = new CSProjFile(doc, csprojFilePath);

    var hbmFiles = GetHBMFilesFromCSProj(csProjFile);

    if (IsFluentProject(csProjFile))
    {
        ArchAngel.Interfaces.SharedData.CurrentProject.SetUserOption("UseFluentNHibernate", true);
        string tempFluentPath = Path.Combine(
            Environment.GetFolderPath(Environment.SpecialFolder.CommonApplicationData),
            "Visual NHibernate" + Path.DirectorySeparatorChar + "Temp" + Path.DirectorySeparatorChar + "FluentTemp");
        var fluentHbmFiles = GetHBMFilesForFluentFromCSProj(csProjFile, tempFluentPath);
        // Combine the actual HBM files with the ones derived from FluentNH
        hbmFiles = hbmFiles.Union(fluentHbmFiles);
    }
    else
    {
        ArchAngel.Interfaces.SharedData.CurrentProject.SetUserOption("UseFluentNHibernate", false);
    }

    var csFiles = GetCSharpFilesFromCSProj(doc, csprojFilePath);
    var nhvFiles = GetNHVFilesFromCSProj(doc, csprojFilePath);

    var databaseConnector = nhConfigFile == null ? null : nhConfigFile.DatabaseConnector;

    // We need to fetch ALL tables, because HBM mappings don't include
    // association tables, or at least it's difficult to find them.
    List<SchemaData> tablesToFetch = null; // entityLoader.GetTablesFromHbmFiles(hbmFiles);
    IDatabaseLoader loader = null;
    IDatabase database = null;

    if (databaseConnector != null)
    {
        database = GetDatabase(databaseConnector, out loader, tablesToFetch);
    }

    _progress.SetCurrentState("Parsing your existing Model Project", ProgressState.Normal);
    var parseResults = ParseResults.ParseCSharpFiles(csFiles);

    _progress.SetCurrentState("Loading Mapping Information From NHibernate Mapping Files", ProgressState.Normal);
    var mappingSet = entityLoader.GetEntities(hbmFiles, parseResults, database);
    entityLoader.ApplyConstraints(mappingSet, nhvFiles, parseResults);

    #region Create References

    // BUG FIX: when no NH config file (and hence no database connector) is
    // supplied, 'database' stays null; the reference-creation pass below used
    // to dereference it unconditionally and threw NullReferenceException.
    if (database != null)
    {
        // Get a set of all Guids for tables that we will want to create references from
        HashSet<Guid> existingTables = new HashSet<Guid>(database.Tables.Select(t => t.InternalIdentifier));
        foreach (var mappedTable in mappingSet.Mappings.Select(m => m.FromTable))
        {
            existingTables.Add(mappedTable.InternalIdentifier);
        }

        HashSet<Guid> processedRelationships = new HashSet<Guid>();
        foreach (var table in database.Tables)
        {
            foreach (var directedRel in table.DirectedRelationships)
            {
                var relationship = directedRel.Relationship;
                if (processedRelationships.Contains(relationship.InternalIdentifier))
                {
                    continue; // Skip relationships that have already been handled.
                }
                if (relationship.MappedReferences().Any())
                {
                    continue; // Skip relationships that have been mapped by the user.
                }
                if (existingTables.Contains(directedRel.ToTable.InternalIdentifier) == false)
                {
                    continue; // Skip relationships whose tables have no mapped Entity.
                }
                if (relationship.PrimaryTable.MappedEntities().FirstOrDefault() == null ||
                    relationship.ForeignTable.MappedEntities().FirstOrDefault() == null)
                {
                    continue;
                }

                ArchAngel.Providers.EntityModel.Controller.MappingLayer.MappingProcessor.ProcessRelationshipInternal(
                    mappingSet, relationship,
                    new ArchAngel.Providers.EntityModel.Controller.MappingLayer.OneToOneEntityProcessor());
                processedRelationships.Add(relationship.InternalIdentifier);
            }
        }
    }

    #endregion

    // Make sure every reference held by an entity is registered on the entity set.
    foreach (var entity in mappingSet.EntitySet.Entities)
    {
        foreach (var reference in entity.References)
        {
            if (!mappingSet.EntitySet.References.Contains(reference))
            {
                mappingSet.EntitySet.AddReference(reference);
            }
        }
    }

    LoadResult result = new LoadResult();
    result.MappingSet = mappingSet;
    result.DatabaseLoader = loader;
    result.NhConfigFile = nhConfigFile;
    result.CsProjFile = csProjFile;
    return result;
}
/// <summary>
/// Parses a fixed-width record template (".out") file into a Record definition.
/// Expected field shape:
///   {FieldName} at {position} for {size} type {datatype} [DATEFORMAT] [NULL|NOTNULL] [SUB_IND]
/// For example:
///   MSIS_ID at 0 for 20 type CHAR
///   AMT_PAID at 32 for 4 type INTEGER NULL
///   DOLLA_AMOUNT at 56 for 1 type CHAR SUB_IND
/// A "SUB {name}" line starts a sub-template; a trailing "Number of Rows: N"
/// footer is tolerated and skipped. Indicators must be at the end and followed
/// by filler to reach full length.
/// </summary>
/// <param name="filename">Path of the template file to read.</param>
/// <returns>ParseResults whose success/record/Error fields describe the outcome.</returns>
private static ParseResults ParseTemplate(string filename)
{
    ParseResults pr = new ParseResults();

    if (filename == String.Empty)
    {
        pr.success = false;
        pr.Error = "\n Empty File Name \n";
        return pr;
    }

    string dotout;
    using (StreamReader reader = new StreamReader(filename))
    {
        try
        {
            dotout = reader.ReadToEnd();
        }
        catch (OutOfMemoryException) // exception object itself is not needed
        {
            pr.Error = "File too big to be a template";
            pr.success = false;
            return pr;
        }
    }

    // {FieldName} at {position} for {size} type {datatype} {DATEFORMAT} {NULL/NOTNULL}
    //      1       2      3      4     5     6      7        Optional     Optional
    int numTokens = 7;
    int ihold;

    // Split the template file on whitespace.
    string[] tokens = dotout.Split(new char[] { ' ', '\r', '\n', '\t' }, StringSplitOptions.RemoveEmptyEntries);

    // Accumulates each field as described in the template.
    Record rec = new Record();

    // Process each field; 'i' is advanced inside the body as tokens are consumed.
    for (int i = 0; (i + 7) <= tokens.GetLength(0);)
    {
        Field newField;
        string name;
        int start;
        int size;
        string type;
        bool isTemplateIndicator = false;

        // Field name
        name = tokens[i++];

        // Expected "at" token
        if (tokens[i++].ToUpper() != "AT")
        {
            pr.Error = (("Expected \"at\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return pr;
        }

        // Field position
        if (!int.TryParse(tokens[i++], out ihold))
        {
            pr.Error = ((@"Unparsable Position on Line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return pr;
        }
        start = ihold;

        // Expected "for" token
        if (tokens[i++].ToUpper() != "FOR")
        {
            pr.Error = (("Expected \"for\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return pr;
        }

        // Field size
        if (!int.TryParse(tokens[i++], out ihold))
        {
            pr.Error = ((@"Unparsable Field Size on Line " + (rec.Count + 1).ToString() + " Field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return pr;
        }
        size = ihold;

        // Expected "type" token
        if (tokens[i++].ToUpper() != "TYPE")
        {
            pr.Error = (("Expected \"type\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return pr;
        }

        // Data type
        type = tokens[i++];
        if (type.ToUpper() == "DATE")
        {
            // Skip the date-format token that follows DATE. Oracle's date format
            // is incomprehensible to our parsers; technically this should be
            // hidden in the date-field object, but it is too abstracted behind
            // the field object to make it worth it. Besides, this counts as a token.
            i++;
        }

        bool isNullable = false;
        // Optional trailing markers: SUB_IND / NULL / NOTNULL, in any order.
        // BUG FIX: re-check the bound after each consumed token; the previous
        // loop could index past the end of the token array when a template
        // ended with one of these optional markers.
        for (int j = 0; j < 3 && i < tokens.Length; j++)
        {
            if (tokens[i].ToUpper() == "SUB_IND")
            {
                isTemplateIndicator = true;
                i++;
                if (i >= tokens.Length) { break; }
            }
            // Optional NULL parameter
            if (tokens[i] == "NULL")
            {
                isNullable = true;
                i++;
                if (i >= tokens.Length) { break; }
            }
            if (tokens[i] == "NOTNULL")
            {
                isNullable = false;
                i++;
            }
        }

        // We have our parameters - add the new field.
        newField = rec.AddField(name, start, type, size, isTemplateIndicator, isNullable);

        // Optional "SUB {name}" marker starts a new sub-template.
        if (tokens.Length > i + 1)
        {
            if (tokens[i].ToUpper() == "SUB")
            {
                i++;                             // count the "SUB" token
                rec.NewSubTemplate(tokens[i++]); // start a new template & count the token
            }
        }

        // Optional trailing "Number of Rows:" footer - skip its three words.
        if (tokens.Length > i + 2)
        {
            if (tokens[i] == "Number" && tokens[i + 1] == "of" && tokens[i + 2] == "Rows:")
            {
                i += 3;
            }
        }
    }

    pr.record = rec;
    pr.success = true; // We've made our template
    return pr;
}
/// <summary>
/// Parses a fixed-width record template (".out") file into a Record definition.
/// Expected field shape:
///   {FieldName} at {position} for {size} type {datatype} [DATEFORMAT] [NULL|NOTNULL] [SUB_IND]
/// For example:
///   MSIS_ID at 0 for 20 type CHAR
///   AMT_PAID at 32 for 4 type INTEGER NULL
///   DOLLA_AMOUNT at 56 for 1 type CHAR SUB_IND
/// A "SUB {name}" line starts a sub-template; a trailing "Number of Rows: N"
/// footer is tolerated and skipped. Indicators must be at the end and followed
/// by filler to reach full length.
/// </summary>
/// <param name="filename">Path of the template file to read.</param>
/// <returns>ParseResults whose success/record/Error fields describe the outcome.</returns>
static private ParseResults ParseTemplate(string filename)
{
    ParseResults pr = new ParseResults();

    if (filename == String.Empty)
    {
        pr.success = false;
        pr.Error = "\n Empty File Name \n";
        return (pr);
    }

    string dotout;
    using (StreamReader reader = new StreamReader(filename))
    {
        try
        {
            dotout = reader.ReadToEnd();
        }
        catch (OutOfMemoryException) // exception object itself is not needed
        {
            pr.Error = "File too big to be a template";
            pr.success = false;
            return (pr);
        }
    }

    // {FieldName} at {position} for {size} type {datatype} {DATEFORMAT} {NULL/NOTNULL}
    //      1       2      3      4     5     6      7        Optional     Optional
    int numTokens = 7;
    int ihold;

    // Split the template file on whitespace.
    string[] tokens = dotout.Split(new char[] { ' ', '\r', '\n', '\t' }, StringSplitOptions.RemoveEmptyEntries);

    // Accumulates each field as described in the template.
    Record rec = new Record();

    // Process each field; 'i' is advanced inside the body as tokens are consumed.
    for (int i = 0; (i + 7) <= tokens.GetLength(0);)
    {
        Field newField;
        string name;
        int start;
        int size;
        string type;
        bool isTemplateIndicator = false;

        // Field name
        name = tokens[i++];

        // Expected "at" token
        if (tokens[i++].ToUpper() != "AT")
        {
            pr.Error = (("Expected \"at\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return (pr);
        }

        // Field position
        if (!int.TryParse(tokens[i++], out ihold))
        {
            pr.Error = ((@"Unparsable Position on Line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return (pr);
        }
        else
        {
            start = ihold;
        }

        // Expected "for" token
        if (tokens[i++].ToUpper() != "FOR")
        {
            pr.Error = (("Expected \"for\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return (pr);
        }

        // Field size
        if (!int.TryParse(tokens[i++], out ihold))
        {
            pr.Error = ((@"Unparsable Field Size on Line " + (rec.Count + 1).ToString() + " Field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return (pr);
        }
        size = ihold;

        // Expected "type" token
        if (tokens[i++].ToUpper() != "TYPE")
        {
            pr.Error = (("Expected \"type\" on line " + (rec.Count + 1).ToString() + " field " +
                (rec.Count - ((i - 1) * numTokens)).ToString()) + "\n");
            pr.success = false;
            return (pr);
        }

        // Data type
        type = tokens[i++];
        if (type.ToUpper() == "DATE")
        {
            // Skip the date-format token that follows DATE. Oracle's date format
            // is incomprehensible to our parsers; technically this should be
            // hidden in the date-field object, but it is too abstracted behind
            // the field object to make it worth it. Besides, this counts as a token.
            i++;
        }

        bool isNullable = false;
        // Optional trailing markers: SUB_IND / NULL / NOTNULL, in any order.
        // BUG FIX: re-check the bound after each consumed token; the previous
        // loop could index past the end of the token array when a template
        // ended with one of these optional markers.
        for (int j = 0; j < 3 && i < tokens.Length; j++)
        {
            if (tokens[i].ToUpper() == "SUB_IND")
            {
                isTemplateIndicator = true;
                i++;
                if (i >= tokens.Length) { break; }
            }
            // Optional NULL parameter
            if (tokens[i] == "NULL")
            {
                isNullable = true;
                i++;
                if (i >= tokens.Length) { break; }
            }
            if (tokens[i] == "NOTNULL")
            {
                isNullable = false;
                i++;
            }
        }

        // We have our parameters - add the new field.
        newField = rec.AddField(name, start, type, size, isTemplateIndicator, isNullable);

        // Optional "SUB {name}" marker starts a new sub-template.
        if (tokens.Length > i + 1)
        {
            if (tokens[i].ToUpper() == "SUB")
            {
                i++;                             // Count the "SUB" token
                rec.NewSubTemplate(tokens[i++]); // start a new template & count the token
            }
        }

        // Optional trailing "Number of Rows:" footer - skip its three words.
        if (tokens.Length > i + 2)
        {
            if (tokens[i] == "Number" && tokens[i + 1] == "of" && tokens[i + 2] == "Rows:")
            {
                i += 3;
            }
        }
    }

    pr.record = rec;
    pr.success = true; // We've made our template
    return (pr);
}
/// <summary>
/// Re-initialises the entity model from the csproj selected in the
/// pre-generation data: loads its HBM mappings, parses its C# sources, and
/// wires each entity in the mapping set to its parsed class.
/// </summary>
public void InitialiseEntityModel(ArchAngel.Providers.EntityModel.ProviderInfo providerInfo, PreGenerationData data)
{
    providerInfo.MappingSet.CodeParseResults = null;
    // Clear the current mapped class.
    providerInfo.MappingSet.EntitySet.Entities.ForEach(e => e.MappedClass = null);

    // Find the csproj file we are going to use; nothing to do without one.
    string filename;
    var csprojDocument = GetCSProjDocument(data, out filename);
    if (csprojDocument == null)
    {
        return;
    }

    var csproj = new CSProjFile(csprojDocument, filename);

    // Load the HBM files; fail loudly when the project references a missing file.
    var hbmFiles = ProjectLoader.GetHBMFilesFromCSProj(csproj, fileController);
    foreach (string hbmFilePath in hbmFiles)
    {
        if (!File.Exists(hbmFilePath))
        {
            throw new FileNotFoundException(
                string.Format("A file is defined is your csproj file [{0}], but it cannot be found: [{1}]", filename, hbmFilePath),
                hbmFilePath);
        }
    }
    var mappings = hbmFiles.Select(f => MappingFiles.Version_2_2.Utility.Open(f)).ToList();

    // Parse the CSharp files and publish the results on the mapping set.
    var csharpFiles = ProjectLoader.GetCSharpFilesFromCSProj(csproj, fileController);
    var parseResults = ParseResults.ParseCSharpFiles(csharpFiles);
    providerInfo.MappingSet.CodeParseResults = parseResults;

    // Clear the current mapped class again before re-mapping.
    providerInfo.MappingSet.EntitySet.Entities.ForEach(e => e.MappedClass = null);

    // First pass: map entities via the class names declared in the HBM files.
    var entities = providerInfo.MappingSet.EntitySet.Entities.ToDictionary(e => e.Name);
    foreach (var hm in mappings)
    {
        foreach (var hClass in hm.Classes())
        {
            var fullClassName = HibernateMappingHelper.ResolveFullClassName(hClass, hm);
            var shortClassName = HibernateMappingHelper.ResolveShortClassName(hClass, hm);

            Entity entity;
            if (entities.TryGetValue(shortClassName, out entity))
            {
                entity.MappedClass = parseResults.FindClass(
                    fullClassName, entity.Properties.Select(p => p.Name).ToList());
            }
            else
            {
                Log.InfoFormat("Could not find entity for class named {0} in the NHibernate project on disk.", shortClassName);
            }
        }
    }

    // Second pass: entities still unmapped are looked up by their plain name.
    foreach (var entity in entities.Select(v => v.Value).Where(e => e.MappedClass == null))
    {
        entity.MappedClass = parseResults.FindClass(
            entity.Name, entity.Properties.Select(p => p.Name).ToList());
    }
}