/// <summary>
/// Converts a CSV file into ODC data, growing the backing jagged array as
/// needed to fit the highest origin seen.
/// </summary>
/// <param name="csv">The CSV file to parse</param>
/// <param name="header">Does this csv file contain a header?</param>
/// <param name="offset">How much other data comes before our new entries?</param>
public void LoadCSV(string csv, bool header, int offset = 0)
{
    using ( CsvReader reader = new CsvReader( csv ) )
    {
        var dataLength = this.data.Length;
        // skip the header row if one is present
        if ( header ) reader.LoadLine();
        while ( !reader.EndOfFile )
        {
            int length;
            // skip rows without at least an origin and one value
            if ( ( length = reader.LoadLine() ) < 2 ) continue;
            int origin;
            reader.Get( out origin, 0 );
            if ( ( origin < 0 ) ) continue;
            // grow the array so this origin fits
            if ( origin >= dataLength )
            {
                var temp = new float[origin + 1][];
                Array.Copy( this.data, temp, dataLength );
                this.data = temp;
                dataLength = origin + 1;
            }
            if ( this.data[origin] == null )
            {
                this.data[origin] = new float[this.Types];
            }
            // add in the offset off the bat
            int loaded = offset;
            for ( int i = 1; i < length; i++ )
            {
                // NOTE(review): assumes (length - 1) + offset <= this.Types;
                // no bounds check here — confirm the input format guarantees it
                reader.Get( out this.data[origin][loaded++], i );
            }
        }
    }
}
/// <summary>
/// Entry point: reads the result file, extracts the best generation jobs,
/// and writes them out.
/// </summary>
public void Start()
{
    GenerationJob[] best;
    using ( var reader = new CsvReader( this.InputResultFile.GetFilePath() ) )
    {
        best = GetBest( reader );
    }
    OutputBest( best );
}
/// <summary>
/// Verifies that after loading the line "A,B,C,D,E" each column can be read
/// back as the expected single-character string.
/// </summary>
public void TestCSVReadLine()
{
    using ( CsvReader reader = new CsvReader( TestCSVFileNames[0] ) )
    {
        reader.LoadLine(); //"A,B,C,D,E"
        for ( int column = 0; column < 5; column++ )
        {
            string value;
            reader.Get( out value, column );
            Assert.AreEqual( new String( (char)( 'A' + column ), 1 ), value );
        }
    }
}
/// <summary>
/// Builds the jobs to evaluate this iteration: a zero point, the base point,
/// and one job per parameter with that parameter removed.
/// </summary>
/// <returns>The list of jobs to execute.</returns>
public List<Job> CreateJobsForIteration()
{
    var jobs = new List<Job>();
    var parameters = this.Root.Parameters.ToArray();
    using ( var reader = new CsvReader( this.ResultFile.GetFilePath() ) )
    {
        var map = CreateParameterMap( reader );
        var baseParameters = LoadBaseParameters( parameters, reader, map );
        jobs.Add( CreateZero( baseParameters ) );
        jobs.Add( CreateJob( baseParameters ) );
        for ( int parameterIndex = 0; parameterIndex < baseParameters.Length; parameterIndex++ )
        {
            jobs.Add( CreateWithout( baseParameters, parameterIndex ) );
        }
    }
    return jobs;
}
/// <summary>
/// Loads the jobs for this iteration from the result file. Each row with
/// enough columns produces one job whose parameter values are read starting
/// at column 2 (columns 0 and 1 hold the generation and the value).
/// </summary>
/// <returns>The list of jobs loaded from the result file.</returns>
public List<Job> CreateJobsForIteration()
{
    var ret = new List<Job>();
    var parameters = this.Root.Parameters.ToArray();
    int totalParameters = parameters.Sum( p => p.Names.Length );
    using ( CsvReader reader = new CsvReader( this.ResultFile.GetFilePath() ) )
    {
        // burn the header
        reader.LoadLine();
        int columns;
        while ( reader.LoadLine( out columns ) )
        {
            //+2 for generation and value
            if ( columns >= totalParameters + 2 )
            {
                var jobParameters = new ParameterSetting[parameters.Length];
                var job = new Job()
                {
                    ProcessedBy = null,
                    Processing = false,
                    Processed = false,
                    Value = float.NaN,
                    Parameters = jobParameters
                };
                var columnIndex = 2;
                for ( int i = 0; i < parameters.Length; i++ )
                {
                    jobParameters[i] = new ParameterSetting()
                    {
                        Names = parameters[i].Names,
                        Minimum = parameters[i].Minimum,
                        Maximum = parameters[i].Maximum
                    };
                    reader.Get( out jobParameters[i].Current, columnIndex );
                    // a parameter spanning multiple names consumes that many columns
                    columnIndex += parameters[i].Names.Length;
                }
                ret.Add( job );
                // FIX: was 'NumberOfRows > 0 & ...'; use the idiomatic
                // short-circuit && (both operands are side-effect free, so the
                // result is unchanged)
                if ( NumberOfRows > 0 && ret.Count >= NumberOfRows )
                {
                    break;
                }
            }
        }
    }
    return ret;
}
/// <summary>
/// Load the data from the given text file and return every column of every
/// line as a single flat array of floats.
/// </summary>
/// <param name="fileName">The name of the file to load in</param>
/// <param name="header">Does the file contain a header?</param>
/// <returns>Each column converted into a float</returns>
internal static float[] LoadAllDataToFloat(string fileName, bool header)
{
    using (var reader = new CsvReader(fileName, true))
    {
        // BUG FIX: the header flag was previously ignored; burn the first
        // line when the file declares a header row.
        if (header)
        {
            reader.LoadLine();
        }
        int columns;
        var data = new List<float>();
        while (reader.LoadLine(out columns))
        {
            for (int i = 0; i < columns; i++)
            {
                float temp;
                reader.Get(out temp, i);
                data.Add(temp);
            }
        }
        return data.ToArray();
    }
}
/// <summary>
/// Creates the jobs for this iteration: the zero point, the base point, and
/// two offset points (+/- Delta) per parameter to approximate second
/// derivatives.
/// </summary>
/// <returns>The list of jobs to evaluate.</returns>
public List<Job> CreateJobsForIteration()
{
    var jobs = new List<Job>();
    var parameters = this.Root.Parameters.ToArray();
    using ( var reader = new CsvReader( this.ResultFile.GetFilePath() ) )
    {
        var columnToParameterMap = CreateParameterMap( reader );
        var baseParameters = LoadBaseParameters( parameters, reader, columnToParameterMap );
        jobs.Add( CreateZero( baseParameters ) );
        jobs.Add( CreateJob( baseParameters ) );
        // we will need 4 points to approx the second derivative for now
        for ( int parameterIndex = 0; parameterIndex < baseParameters.Length; parameterIndex++ )
        {
            jobs.Add( CreateWithOffset( baseParameters, parameterIndex, -Delta ) );
            jobs.Add( CreateWithOffset( baseParameters, parameterIndex, Delta ) );
        }
    }
    return jobs;
}
/// <summary>
/// Loads the segment modification data from the input file, grouping the
/// segments by occupation.
/// </summary>
public void Load()
{
    // one temporary list of segments per occupation
    List<Segment>[] temp = new List<Segment>[this.NumberOfOccupations];
    for ( int i = 0; i < temp.Length; i++ )
    {
        temp[i] = new List<Segment>();
    }
    var numberOfModes = this.Root.Modes.Count;
    using ( CsvReader reader = new CsvReader( this.InputFile.GetFileName( this.Root.InputBaseDirectory ) ) )
    {
        // burn header
        reader.LoadLine();
        while ( !reader.EndOfFile )
        {
            // expected layout: occupation, origin start/end, destination
            // start/end, then one column per mode
            if ( reader.LoadLine() >= numberOfModes + 5 )
            {
                int occ, os, oe, ds, de;
                reader.Get( out occ, 0 );
                reader.Get( out os, 1 );
                reader.Get( out oe, 2 );
                reader.Get( out ds, 3 );
                reader.Get( out de, 4 );
                float[] modeData = new float[numberOfModes];
                for ( int i = 0; i < modeData.Length; i++ )
                {
                    reader.Get( out modeData[i], 5 + i );
                }
                // occupations are indexed relative to OccupationStartIndex
                temp[occ - this.OccupationStartIndex].Add( new Segment()
                {
                    Origin = new Range() { Start = os, Stop = oe },
                    Destination = new Range() { Start = ds, Stop = de },
                    ModificationForMode = modeData
                } );
            }
        }
    }
    // freeze the lists into arrays for the final data structure
    this.Data = new Segment[this.NumberOfOccupations][];
    for ( int i = 0; i < this.Data.Length; i++ )
    {
        this.Data[i] = temp[i].ToArray();
    }
}
/// <summary>
/// For each time period, computes total vehicle kilometres travelled (VKT)
/// by home zone from the OD trips data and writes one CSV report per period.
/// </summary>
public void Start()
{
    // NOTE(review): the result is unused, but accessing the zone array may
    // force the zone system to load; kept in case callers rely on that.
    var zones = Root.ZoneSystem.ZoneArray.GetFlatData();
    // FIX: removed the unused 'separators' local (dead code).
    // the matrix stores cost; dividing by cost-per-km recovers distance
    var invCostPerKM = 1.0f / CostPerKm;
    foreach ( var timePeriod in TimePeriods )
    {
        TotalVKT = new Dictionary<int, float>();
        var odCostMatrix = timePeriod.ODFlatCostMatrix.AquireResource<SparseTwinIndex<float>>();
        using ( CsvReader reader = new CsvReader( timePeriod.ODTripsData ) )
        {
            int columns;
            // burn header
            reader.LoadLine();
            while ( reader.LoadLine( out columns ) )
            {
                if ( columns >= 4 )
                {
                    float vkt = 0.0f;
                    int homeZone, origin, destination;
                    float numberOfTrips;
                    reader.Get( out homeZone, 0 );
                    reader.Get( out origin, 1 );
                    reader.Get( out destination, 2 );
                    reader.Get( out numberOfTrips, 3 );
                    var distance = odCostMatrix[origin, destination] * invCostPerKM;
                    // accumulate, starting at 0 when the home zone is new
                    TotalVKT.TryGetValue( homeZone, out vkt );
                    TotalVKT[homeZone] = vkt + numberOfTrips * distance;
                }
            }
        }
        using ( StreamWriter writer = new StreamWriter( timePeriod.VKTbyHomeZone ) )
        {
            writer.WriteLine( "Home Zone, Total VKTs" );
            foreach ( var pair in TotalVKT )
            {
                writer.WriteLine( "{0}, {1}", pair.Key, pair.Value );
            }
        }
    }
}
/// <summary>
/// Reads the observed boardings file into a map from transit line id to its
/// AM boardings. The first line of the file is treated as a header.
/// </summary>
/// <returns>A dictionary mapping line id to observed AM boardings.</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when a non-blank row has fewer than two columns.</exception>
private Dictionary<string, float> LoadObservedBoardingsFile()
{
    var result = new Dictionary<string, float>();
    using ( var reader = new CsvReader( this.ObservedBoardingsFile.GetFilePath() ) )
    {
        reader.LoadLine(); //Skip the first line
        int numCol;
        while ( reader.LoadLine( out numCol ) )
        {
            string lineId;
            reader.Get( out lineId, 0 );
            //Skip over blank lines
            if ( string.IsNullOrWhiteSpace( lineId ) )
            {
                continue;
            }
            if ( numCol < 2 )
            {
                throw new IndexOutOfRangeException( "Observed boardings file is expecting two columns (found " + numCol.ToString() + ")" );
            }
            float amBoardings;
            reader.Get( out amBoardings, 1 );
            result[lineId] = amBoardings;
        }
    }
    return result;
}
/// <summary>
/// Loads the initial family records, links each family to its household via
/// the dwelling id, and builds the family structure. After loading, any
/// single-person family gets that person assigned as the matching head.
/// </summary>
/// <exception cref="XTMFRuntimeException">
/// Thrown when a family id from the file is not in the family repository.
/// </exception>
private void LoadFamilies()
{
    WriteToLog("Starting to Load Families");
    var personRepo = Repository.GetRepository(RepositoryPerson);
    var familyRepo = Repository.GetRepository(RepositoryFamily);
    var householdRepo = Repository.GetRepository(RepositoryHousehold);
    // NOTE(review): personContext is never read below — possibly opened just
    // to hold the person repository for the duration; confirm before removing
    using (var familyContext = familyRepo.GetMultiAccessContext())
    using (var personContext = personRepo.GetMultiAccessContext())
    using (var hhldContext = householdRepo.GetMultiAccessContext())
    using (var reader = new CsvReader(InitialFamilyFile, true))
    {
        int columns;
        if (FilesContainHeaders)
        {
            reader.LoadLine();
        }
        while (reader.LoadLine(out columns))
        {
            if (columns > 3)
            {
                int familyId, dwellingId, ageM, ageF;
                reader.Get(out familyId, 0);
                reader.Get(out dwellingId, 2);
                // columns 14 and 17: presumably the ages of the male and
                // female heads — confirm against the file layout
                reader.Get(out ageM, 14);
                reader.Get(out ageF, 17);
                // if the family is being used, update the index
                Family family;
                if (!familyContext.TryGet(familyId, out family))
                {
                    throw new XTMFRuntimeException($"In '{Name}' we tried to load family data for a family that does not exist!");
                }
                // if there is no dwelling we can't initialize them
                if (dwellingId < 0)
                {
                    continue;
                }
                Household h = hhldContext[dwellingId];
                family.Household = h;
                h.Families.Add(family);
                BuildFamilyStructure(family, ageM, ageF);
            }
        }
        // Set all single families to have themselves as the female/male head of the household
        foreach (var fIndex in familyContext.GetKeys())
        {
            var family = familyContext[fIndex];
            var persons = family.Persons;
            if (persons.Count == 1)
            {
                if (family.FemaleHead == null && persons[0].Sex == Sex.Female)
                {
                    family.FemaleHead = persons[0];
                }
                else if (family.MaleHead == null && persons[0].Sex == Sex.Male)
                {
                    family.MaleHead = persons[0];
                }
            }
        }
    }
}
/// <summary>
/// Reads the header line and builds one ParameterSetting per data column,
/// skipping the first two columns (generation and value).
/// </summary>
/// <param name="reader">The reader positioned before the header line.</param>
/// <returns>The parameter settings named after the header columns.</returns>
private ParameterSetting[] ProcessHeader(CsvReader reader)
{
    int columns = reader.LoadLine();
    var settings = new List<ParameterSetting>();
    for ( int column = 2; column < columns; column++ )
    {
        string name;
        reader.Get( out name, column );
        settings.Add( new ParameterSetting() { Names = new string[] { name } } );
    }
    return settings.ToArray();
}
/// <summary>
/// Prepares the model before the first simulated year: seeds the random
/// stream and loads the divorce rates, pre-dividing consecutive entries so
/// survival ratios can be read without dividing at runtime.
/// </summary>
/// <param name="firstYear">The first year of the simulation.</param>
public void BeforeFirstYear(int firstYear)
{
    FirstYear = firstYear;
    // Seed the Random Number Generator
    RandomStream.CreateRandomStream(ref RandomGenerator, Seed);
    // load in the data we will use for rates
    var data = new List<float>();
    using (CsvReader reader = new CsvReader(DivorceRatesFile, true))
    {
        int columns;
        while (reader.LoadLine(out columns))
        {
            if (columns < 2)
            {
                continue;
            }
            float rate;
            reader.Get(out rate, 1);
            data.Add(rate);
        }
    }
    DivorceData = data.ToArray();
    // process the data so to remove all of the divides needed to replicate
    // BaseSurvival = DivorceData[MarriageDuration]/DivorceData[MarriageDuration - 1]
    for (int i = DivorceData.Length - 1; i > 0; i--)
    {
        DivorceData[i] /= DivorceData[i - 1];
    }
}
/// <summary>
/// Reads the evaluation file and groups each row's value and parameters by
/// generation number into the given dictionary.
/// </summary>
/// <param name="generationDictionary">The map from generation to its collected entries.</param>
private void LoadInformation(Dictionary<int, GenerationInformation> generationDictionary)
{
    using (CsvReader reader = new CsvReader(this.EvaluationFileName))
    {
        LoadHeader(reader);
        int numberOfParameters = this.Headers.Length;
        while (reader.LoadLine() != 0)
        {
            int generation;
            reader.Get(out generation, 0);
            // one slot for the value plus one per parameter
            var parameters = new float[numberOfParameters + 1];
            for (int i = 0; i <= numberOfParameters; i++)
            {
                // offset 1 for the generation i == 0 is the value
                reader.Get(out parameters[i], i + 1);
            }
            GenerationInformation info;
            if (!generationDictionary.TryGetValue(generation, out info))
            {
                info = new GenerationInformation();
                info.Entries = new List<float[]>(250);
                generationDictionary.Add(generation, info);
            }
            info.Entries.Add(parameters);
        }
    }
}
/// <summary>
/// Reads the header line of the estimation output and, when percent distance
/// is enabled, maps each header column back to its parameter index.
/// </summary>
/// <param name="reader">The reader positioned before the header line.</param>
/// <exception cref="XTMFRuntimeException">
/// Thrown when percent distance is enabled and the headers cannot all be
/// matched to parameters.
/// </exception>
private void LoadHeader(CsvReader reader)
{
    int columns = reader.LoadLine();
    if (columns == 0)
    {
        return;
    }
    // the first two columns (generation, value) are not parameter headers
    this.Headers = new string[columns - 2];
    for (int column = 2; column < columns; column++)
    {
        reader.Get(out this.Headers[column - 2], column);
    }
    // If we have percent distance enabled go through and try to link the headers
    if (!this.PercentDistance)
    {
        return;
    }
    // if we are doing percent distance try to match the parameters to the header indexes
    if (this.Headers.Length != this.Parameters.Length)
    {
        throw new XTMFRuntimeException("The number of headers did not match the number of parameters!\r\n" +
            "The Parameter Range file has " + this.Parameters.Length +
            " and the Estimation Output has " + this.Headers.Length);
    }
    for (int i = 0; i < this.Parameters.Length; i++)
    {
        bool found = false;
        for (int j = 0; j < this.Headers.Length; j++)
        {
            if (this.Parameters[i].Name == this.Headers[j])
            {
                this.ParameterIndex[j] = i;
                found = true;
                break;
            }
        }
        if (!found)
        {
            throw new XTMFRuntimeException("We were unable to find a header to match parameter " + Parameters[i].Name + "!");
        }
    }
}
/// <summary>
/// Processes the header then scans the results, returning either the highest
/// or the lowest valued jobs depending on whether we are maximizing.
/// </summary>
/// <param name="reader">The reader for the result file.</param>
/// <returns>The best generation jobs found.</returns>
private GenerationJob[] GetBest(CsvReader reader)
{
    var parameters = ProcessHeader( reader );
    return this.Maximize
        ? GetHighestBest( reader, parameters )
        : GetLowestBest( reader, parameters );
}
/// <summary>
/// Reads the next row with enough columns and converts it into a generation
/// job. Column 0 holds the generation, column 1 the value, and the remaining
/// columns the parameter values.
/// </summary>
/// <param name="reader">The reader for the result file.</param>
/// <param name="parameters">The template parameters to clone for the job.</param>
/// <returns>The next job, or null when the file is exhausted.</returns>
private GenerationJob ReadJob(CsvReader reader, ParameterSetting[] parameters)
{
    int columns;
    while ( reader.LoadLine( out columns ) )
    {
        if ( columns < parameters.Length + 2 )
        {
            continue;
        }
        var localParameters = CloneParameters( parameters );
        var job = new Job()
        {
            ProcessedBy = null,
            Processed = false,
            Processing = false,
            Parameters = localParameters
        };
        int generation;
        reader.Get( out generation, 0 );
        // we don't load the generation
        var genJob = new GenerationJob( job, generation );
        reader.Get( out job.Value, 1 );
        for ( int i = 0; i < localParameters.Length; i++ )
        {
            reader.Get( out localParameters[i].Current, i + 2 );
        }
        return genJob;
    }
    return null;
}
/// <summary>
/// Loads the base parameter values from the first data line of the result
/// file, using the column map to place each column's value into the correct
/// parameter slot.
/// </summary>
/// <param name="parameters">The parameter definitions to copy names and bounds from.</param>
/// <param name="reader">The reader, positioned after the header.</param>
/// <param name="columnMap">Maps each data column (offset by 2) to a parameter index.</param>
/// <returns>The base parameter settings.</returns>
private static ParameterSetting[] LoadBaseParameters(ParameterSetting[] parameters, CsvReader reader, int[] columnMap)
{
    var baseParameters = new ParameterSetting[parameters.Length];
    // we only read the first line
    int columns;
    if(reader.LoadLine(out columns))
    {
        // FIX: a Job object was previously constructed here and never used;
        // it has been removed.
        for(int i = 0; i < parameters.Length; i++)
        {
            baseParameters[i] = new ParameterSetting()
            {
                Names = parameters[i].Names,
                Minimum = parameters[i].Minimum,
                Maximum = parameters[i].Maximum
            };
        }
        // columns 0 and 1 hold the generation and value, so data starts at 2
        for(int i = 0; i < columnMap.Length; i++)
        {
            reader.Get(out baseParameters[columnMap[i]].Current, i + 2);
        }
    }
    return baseParameters;
}
/// <summary>
/// Reads the header line and resolves each column name (from column 2
/// onward) to the index of its matching parameter.
/// </summary>
/// <param name="reader">The reader positioned before the header line.</param>
/// <returns>For each data column, the index of the parameter it maps to.</returns>
/// <exception cref="XTMFRuntimeException">Thrown when a column name matches no parameter.</exception>
private int[] CreateParameterMap(CsvReader reader)
{
    var parameters = this.Root.Parameters.ToArray();
    int columns;
    reader.LoadLine( out columns );
    var ret = new int[columns - 2];
    for ( int i = 2; i < columns; i++ )
    {
        string name;
        reader.Get( out name, i );
        var selectedParameter = ( from p in parameters
                                  where p.Names.Contains( name )
                                  select p ).FirstOrDefault();
        if ( selectedParameter == null )
        {
            // BUG FIX: the message was missing the closing quote after the module name
            throw new XTMFRuntimeException( "In '" + this.Name + "' the parameter '" + name + "' could not be resolved." );
        }
        ret[i - 2] = IndexOf( parameters, selectedParameter );
    }
    return ret;
}
/// <summary>
/// Creates a commented CSV reader over the given stream.
/// </summary>
/// <param name="Stream">The stream to read CSV data from.</param>
public CommentedCsvReader(Stream Stream)
{
    this.linesRead = 0;
    this.Reader = new CsvReader( Stream );
    SetupReader();
}
/// <summary>
/// Creates a commented CSV reader over the given file.
/// </summary>
/// <param name="FileName">The full path to the file.</param>
public CommentedCsvReader(string FileName)
{
    this.linesRead = 0;
    this.Reader = new CsvReader( FileName );
    SetupReader();
}
/// <summary>
/// Scans all jobs from the reader and keeps the ResultsToSave lowest-valued
/// ones, in ascending order of value.
/// </summary>
/// <param name="reader">The reader for the result file.</param>
/// <param name="parameters">The template parameters for each job.</param>
/// <returns>The lowest-valued jobs, best first.</returns>
private GenerationJob[] GetLowestBest(CsvReader reader, ParameterSetting[] parameters)
{
    List<GenerationJob> best = new List<GenerationJob>( this.ResultsToSave + 2 );
    GenerationJob currentJob;
    var maxResults = this.ResultsToSave;
    while ( ( currentJob = ReadJob( reader, parameters ) ) != null )
    {
        int index = -1;
        // Always accept the first one
        if ( best.Count > 0 )
        {
            //check the last one first since they are in order to see if we need to check each one
            if ( currentJob.Job.Value < best[best.Count - 1].Job.Value )
            {
                for ( int i = 0; i < best.Count; i++ )
                {
                    if ( currentJob.Job.Value < best[i].Job.Value )
                    {
                        index = i;
                        break;
                    }
                }
            }
        }
        if ( index >= 0 )
        {
            best.Insert( index, currentJob );
            if ( best.Count > maxResults )
            {
                best.RemoveAt( best.Count - 1 );
            }
        }
        // if we don't have enough just add
        // FIX: was 'index == -1 & best.Count < maxResults'; use the idiomatic
        // short-circuit && (both operands are side-effect free, result unchanged)
        else if ( index == -1 && best.Count < maxResults )
        {
            best.Add( currentJob );
        }
    }
    return best.ToArray();
}
/// <summary>
/// Loads the district radius file and builds a sparse array mapping each
/// district number to its radius.
/// </summary>
/// <returns>A sparse array of radii indexed by district.</returns>
private SparseArray<float> BuildDistrictRadius()
{
    var districtNumbers = new List<int>();
    var radii = new List<float>();
    using ( CsvReader reader = new CsvReader( this.GetFullPath( this.DistrictRadiusFile ) ) )
    {
        // burn header
        reader.LoadLine();
        while ( !reader.EndOfFile )
        {
            // make sure that we actually loaded data in
            if ( reader.LoadLine() == 0 )
            {
                continue;
            }
            // if we have data process it
            int districtNumber;
            float districtRadius;
            reader.Get( out districtNumber, 0 );
            reader.Get( out districtRadius, 1 );
            districtNumbers.Add( districtNumber );
            radii.Add( districtRadius );
        }
    }
    return SparseArray<float>.CreateSparseArray( districtNumbers.ToArray(), radii );
}
/// <summary>
/// Reads the observed boardings file into a map from line/group id to a
/// (weight, AM boardings) pair. The first line is treated as a header.
/// </summary>
/// <returns>A dictionary mapping line id to (weight, observed AM boardings).</returns>
/// <exception cref="IndexOutOfRangeException">Thrown when a row has fewer than three columns.</exception>
/// <exception cref="InvalidOperationException">Thrown when a line reports zero or negative boardings.</exception>
private Dictionary<string, Tuple<float, float>> LoadObservedBoardingsFile()
{
    var result = new Dictionary<string, Tuple<float, float>>();
    using (CsvReader reader = new CsvReader(this.ObservedBoardingsFile.GetFilePath()))
    {
        reader.LoadLine(); //Skip the first line
        int numCol;
        while (reader.LoadLine(out numCol))
        {
            if (numCol < 3)
            {
                // BUG FIX: the message previously said "two columns" while the check requires three
                throw new IndexOutOfRangeException("Observed boardings file is expecting three columns (found " + numCol.ToString() + ")");
            }
            string lineId;
            float weight;
            float amBoardings;
            reader.Get(out lineId, 0);
            reader.Get(out weight, 1);
            reader.Get(out amBoardings, 2);
            if (amBoardings <= 0.0f)
            {
                // use a specific exception type instead of the base Exception
                throw new InvalidOperationException("Found 0 boardings for line/group " + lineId + ". All values must be greater than 0 in order to calculate WMPE");
            }
            result[lineId] = new Tuple<float, float>(weight, amBoardings);
        }
    }
    return result;
}
/// <summary>
/// Loads the initial dwelling/household records from the census household
/// file, creating a Household and Dwelling pair per row and registering both
/// in their repositories keyed by the dwelling id.
/// </summary>
private void LoadDwellings()
{
    WriteToLog("Starting to Load Dwellings/Households");
    var householdRepo = Repository.GetRepository(RepositoryHousehold);
    var dwellingRepo = Repository.GetRepository(RepositoryDwellings);
    var initialDate = new Date(InitialYear, 0);
    var zoneSystem = Repository.GetRepository(ZoneSystem);
    using (var reader = new CsvReader(InitialHouseholdFile, true))
    {
        int columns;
        if (FilesContainHeaders)
        {
            reader.LoadLine();
        }
        while (reader.LoadLine(out columns))
        {
            /* Census household file columns (only a few are read below):
               00 dwellingid, pumhid, ctcode, tts96, prov, urbru, cmapust, weight, hhinda, hhindb,
               10 hhpera, hhperb1, hhperb2, hhperd, hhpere, hhperf, hhperg, hhperh, hhsize, hhcomp,
               20 hhnonfam, hhnuef, hhnuldg, hhnuempi, hhnutoti, hhmsinc, hhempinc, hhnetinv, hhgovinc, hhotinc,
               30 hhtotinc, dtypeh, builth, tenurh, morg, rcondh, room, heath, fuelhh, valueh,
               40 grosrth, renth, omph, mppit, hmage, hmsex, hmmarst, hmefamst, hmbirtpl, hmethnic,
               50 hmimmig, hhmotg, hmofflg, hmmob5, hmhlos, hmocc81, hmlfact, hmcow, hmwkswk, hmfptwk,
               60 hmmsinc, hmempinc, hmnetinv, hmgovinc, hmotinc, hmtotinc, spage, spsex, spbirtpl, spethnic,
               70 spmotg, spofflg, spimmig, spmob5, sphlos, spocc81, splfact, spcow, spwkswk, spfptwk,
               80 spmsinc, spempinc, spnetinv, spgovinc, spotinc, sptotinc, efsize, efadult, efpersgh, efpersa,
               90 efpersb, efpersc, efpersd, efcomp, efnuempi, efnutoti, efloinc, efmsinc, efempinc, efnetinv,
               100 efgovinc, efotinc, eftotinc, id */
            if (columns > 39)
            {
                int dwellingid, ctcode, hhcomp, dtype, tenur, rooms, value;
                reader.Get(out dwellingid, 0);   // unique dwelling / household key
                reader.Get(out ctcode, 2);       // census tract code
                reader.Get(out hhcomp, 19);      // household composition
                reader.Get(out dtype, 31);       // dwelling type (read but not used below)
                reader.Get(out tenur, 33);       // tenure code
                reader.Get(out rooms, 36);       // number of rooms
                reader.Get(out value, 39);       // dwelling value
                Household h = new Household();
                Dwelling d = new Dwelling();
                // both repositories are keyed by the same dwelling id
                householdRepo.AddNew(dwellingid, h);
                dwellingRepo.AddNew(dwellingid, d);
                h.Dwelling = d;
                h.HouseholdType = ConvertHouseholdType(hhcomp);
                d.Exists = true;
                // map the census tract to the model's flat zone index
                d.Zone = zoneSystem.GetFlatIndex(ctcode);
                d.Rooms = rooms;
                d.Value = new Money(value, initialDate);
                h.Tenure = ConvertTenureFromCensus(tenur);
            }
        }
    }
}
/// <summary>
/// Loads the yearly family data and builds a pool of families (heads,
/// spouses and children) to draw from, one Family per valid record.
/// </summary>
/// <param name="deltaYear">The index of the year's data file to read.</param>
/// <param name="rand">The random number generator used to draw ages and sexes.</param>
/// <returns>The pool of families constructed from the file.</returns>
private List<Family> BuildFamilyPool(int deltaYear, Rand rand)
{
    var pool = new List<Family>();
    // Index of data loaded in ILUTE standardized variables (only the columns
    // read below are listed; the remaining columns 0, 2-3, 9-13, 15-24 and
    // 26-39 hold CMA, family size, income, schooling, labour-force, tenure
    // and rent data that this method does not use):
    //  1 icfstruc Census Family Structure
    //  4 ichilda  Never-married S/D in CF at home, under 6 years of age
    //  5 ichildb  Never-married S/D in CF at home, 6-14 years of age
    //  6 ichildc  Never-married S/D in CF at home, 15-17 years of age
    //  7 ichildd  Never-married S/D in CF at home, 18-24 years of age
    //  8 ichilde  Never-married S/D in CF at home, 25 years or over
    // 14 iagem    Age of H/MCLP/MLP/Male NF Person (85 = 85+)
    // 25 iagef    Age of W/FCLP/FLP/Female NF Person (85 = 85+)
    using (var reader = new CsvReader(YearlyFamilyData[deltaYear], true))
    {
        int columns;
        // reused buffer for the children of the current record
        List<Person> children = new List<Person>();
        while (reader.LoadLine(out columns))
        {
            if (columns >= 39)
            {
                var createMale = false;
                var createFemale = false;
                int familyStructure, ageM, ageF, childrenA, childrenB, childrenC, childrenD, childrenE;
                reader.Get(out familyStructure, 1);
                reader.Get(out ageM, 14);
                reader.Get(out ageF, 25);
                // NOTE(review): structures 1-4 appear to be couple families and
                // 5/6 male/female lone parents, with 99 as the missing-age
                // sentinel — confirm against the census coding
                if (familyStructure > 0 && familyStructure < 5)
                {
                    createMale = createFemale = true;
                }
                else if (familyStructure == 5 && ageM != 99)
                {
                    createMale = true;
                }
                else if (familyStructure == 6 && ageF != 99)
                {
                    createFemale = true;
                }
                else
                {
                    // this household record is invalid, just continue
                    continue;
                }
                // get the number of children
                reader.Get(out childrenA, 4);
                reader.Get(out childrenB, 5);
                reader.Get(out childrenC, 6);
                reader.Get(out childrenD, 7);
                reader.Get(out childrenE, 8);
                var family = new Family();
                Person male = null, female = null;
                if (createMale)
                {
                    male = CreatePerson(0, AgeFromAdultAgeCategory(rand.Take(), ageM), 2, (createMale && createFemale ? 2 : 4));
                    family.Persons.Add(male);
                    male.Family = family;
                    family.MaleHead = male;
                }
                if (createFemale)
                {
                    female = CreatePerson(0, AgeFromAdultAgeCategory(rand.Take(), ageF), 1, (createMale && createFemale ? 2 : 4));
                    family.Persons.Add(female);
                    female.Family = family;
                    family.FemaleHead = female;
                }
                if (male != null && female != null)
                {
                    male.Spouse = female;
                    female.Spouse = male;
                }
                pool.Add(family);
                // Create children for each age range rand.NextFloat = [0,1)
                if (childrenA > 0 || childrenB > 0 || childrenC > 0 || childrenD > 0 || childrenE > 0)
                {
                    // each bracket draws a uniform age within its range and a 50/50 sex
                    for (int i = 0; i < childrenA; i++)
                    {
                        children.Add( CreatePerson(0, (int)(0.0f + rand.Take() * 6.0f), rand.Take() < 0.5f ? 2 : 1, 4));
                    }
                    for (int i = 0; i < childrenB; i++)
                    {
                        children.Add( CreatePerson(0, (int)(6.0f + rand.Take() * 9.0f), rand.Take() < 0.5f ? 2 : 1, 4));
                    }
                    for (int i = 0; i < childrenC; i++)
                    {
                        children.Add( CreatePerson(0, (int)(15.0f + rand.Take() * 3.0f), rand.Take() < 0.5f ? 2 : 1, 4));
                    }
                    for (int i = 0; i < childrenD; i++)
                    {
                        children.Add( CreatePerson(0, (int)(18.0f + rand.Take() * 7.0f), rand.Take() < 0.5f ? 2 : 1, 4));
                    }
                    for (int i = 0; i < childrenE; i++)
                    {
                        children.Add(CreatePerson(0, 25, rand.Take() < 0.5f ? 2 : 1, 4));
                    }
                    male?.Children.AddRange(children);
                    female?.Children.AddRange(children);
                    foreach (var child in children)
                    {
                        child.Father = male;
                        child.Mother = female;
                        child.Family = family;
                        // link every pair of children as siblings
                        foreach (var otherChild in children)
                        {
                            if (child != otherChild)
                            {
                                child.Siblings.Add(otherChild);
                            }
                        }
                        family.Persons.Add(child);
                    }
                    // now that everything is copied over we can release the children
                    children.Clear();
                }
            }
        }
    }
    return pool;
}
/// <summary>
/// Loads the initial person records, creates each person, and attaches them
/// to their family — creating a fresh family when the record has none but
/// does have a dwelling.
/// </summary>
private void LoadPersons()
{
    WriteToLog("Starting to Load Persons");
    var personRepo = Repository.GetRepository(RepositoryPerson);
    var familyRepo = Repository.GetRepository(RepositoryFamily);
    int personsWithNegativeFamilyIndex = 0;
    List<Family> toAddAfterwards = new List<Family>();
    using (var reader = new CsvReader(InitialPersonFile, true))
    {
        /* (Columns)
           00 personid, pumiid, familyid, dwellingid, prov, cmapust, hhclass, htype, unitsp, hhincp,
           10 ompp, grosrtp, rentp, hhstat, efstat, efsize, cfstat, cfsize, mscfinc, cfincp,
           20 agep, sexp, marstp, mob5p, pr5p, lfact71, lfact, hrswk, lstwkp, wkswk,
           30 fptwk, preschp, occ81p, occ71p, ind80p, ind70p, cowp, hlosp, hgrad, psuv,
           40 psot, trnuc, dgree, dgmfs, ethnicor, vismin, abethnic, duethnic, geethnic, scethnic,
           50 huethnic, poethnic, ukethnic, crethnic, grethnic, itethnic, prethnic, jeethinc, waethnic, saethnic,
           60 chethnic, fiethnic, eaethnic, blethnic, birtplac, citizens, yrimmig, immigage, offlang, homelang,
           70 mothertg, totincp, wagesp, selfip, invstp, oasgip, cqppbp, famalp, chdcrp, uicbnp,
           80 govtip, retirp, otincp, hmainp, tenurp, rcondp, valuep, room, id; */
        // there is no header at the moment so we don't need to burn a line
        int columns;
        if (FilesContainHeaders)
        {
            reader.LoadLine();
        }
        while (reader.LoadLine(out columns))
        {
            if (columns >= 89)
            {
                // NOTE(review): several of these are read but not yet used
                // (see the TODO below) — they are staged for future fields
                int personid, familyid, dwellingid, hhstat, cfstat, agep, sexp, marstp, lfact, occ81p, ind80p, totincp, hlosp;
                int dgmfs, psuv, psot, trnuc, dgree;
                reader.Get(out personid, 0);
                reader.Get(out familyid, 2);
                reader.Get(out dwellingid, 3);
                reader.Get(out hhstat, 13);
                reader.Get(out cfstat, 16);
                reader.Get(out agep, 20);
                reader.Get(out sexp, 21);
                reader.Get(out marstp, 22);
                reader.Get(out lfact, 26);
                reader.Get(out occ81p, 32);
                reader.Get(out ind80p, 34);
                reader.Get(out totincp, 71);
                reader.Get(out hlosp, 37);
                reader.Get(out dgmfs, 43);
                reader.Get(out psuv, 39);
                reader.Get(out psot, 40);
                reader.Get(out trnuc, 41);
                reader.Get(out dgree, 42);
                Family personsFamily;
                // if they are living alone create a new family for them
                if (familyid < 0)
                {
                    // if the person has no family and no dwelling just continue
                    // this would mean that they live in a collective
                    if (dwellingid < 0)
                    {
                        continue;
                    }
                    personsWithNegativeFamilyIndex++;
                    personsFamily = new Family();
                    toAddAfterwards.Add(personsFamily);
                }
                else if (!familyRepo.TryGet(familyid, out personsFamily))
                {
                    // otherwise create the new family
                    personsFamily = new Family();
                    familyRepo.AddNew(familyid, personsFamily);
                }
                Person p;
                //TODO: Finish filling out the personal information for this individual
                // NOTE(review): sexp == 2 maps to Male here — confirm against the census coding
                personRepo.AddNew(personid, (p = new Person() { Age = agep, Family = personsFamily, Living = true, Sex = sexp == 2 ? Sex.Male : Sex.Female }));
                // add the person to their family
                personsFamily.Persons.Add(p);
            }
        }
        // fill in the rest
        foreach (var family in toAddAfterwards)
        {
            familyRepo.AddNew(family);
        }
        WriteToLog("Total number of families loaded: " + familyRepo.Count);
        WriteToLog("Total number of persons loaded: " + personRepo.Count);
    }
}
/// <summary>
/// Loads the yearly individuals data and builds a pool of single-person
/// families from every record at or above the age of maturity.
/// </summary>
/// <param name="deltaYear">The index of the year's data file to read.</param>
/// <param name="rand">The random number generator used when creating persons.</param>
/// <returns>A list of single-person families.</returns>
private List<Family> BuildIndividualPool(int deltaYear, Rand rand)
{
    /* Index of data loaded in ILUTE standardized variables:
        0 CMA, 1 household type, 2 household size, 3 census family status,
        4 persons in census family, 5 age, 6 sex, 7 legal marital status,
        8 highest grade of elem/sec school, 9 highest level of schooling,
       10 trades/other non-university certification, 11 highest degree,
       12 major field of study, 13 total years of schooling,
       14 mobility status (1 year ago), 15 labour force activity,
       16 occupation (1980), 17 industry (1980 SIC), 18 total income,
       19 wages and salaries, 20 tenure, 21 monthly gross rent */
    var pool = new List<Family>();
    using (var reader = new CsvReader(YearlyIndividualsData[deltaYear]))
    {
        int columns;
        while (reader.LoadLine(out columns))
        {
            if (columns < 22)
            {
                continue;
            }
            // read in the age and skip records below the age of maturity
            int age;
            reader.Get(out age, 5);
            if (age < Parent.AgeOfMaturity)
            {
                continue;
            }
            int sex, maritalStatus;
            reader.Get(out sex, 6);
            reader.Get(out maritalStatus, 7);
            var person = CreatePerson(rand.Take(), age, sex, maritalStatus);
            // individuals enter the pool alone, so married records become single
            if (person.MaritalStatus == MaritalStatus.Married)
            {
                person.MaritalStatus = MaritalStatus.Single;
            }
            var family = new Family();
            family.Persons.Add(person);
            pool.Add(family);
        }
    }
    return pool;
}
/// <summary>
/// Load the observed mode-split probabilities and total trips per zone from
/// the truth data file.
/// </summary>
/// <param name="zoneSystem">The zone system the model is using</param>
private void LoadProbabilities(SparseArray<IZone> zoneSystem)
{
    var zones = zoneSystem.GetFlatData();
    AutoProbabilities = new float[zones.Length];
    TransitProbabilities = new float[zones.Length];
    TotalTrips = new float[zones.Length];
    using (CsvReader reader = new CsvReader(ModeSplitTruthData))
    {
        // burn header
        reader.LoadLine();
        // read in the rest of the data
        int columns;
        while(reader.LoadLine(out columns))
        {
            // BUG FIX: column index 3 (total trips) is read below, so a row
            // needs at least 4 columns; the check was previously 'columns >= 3'
            if(columns >= 4)
            {
                int zone;
                reader.Get(out zone, 0);
                zone = zoneSystem.GetFlatIndex(zone);
                // ignore zones that are not part of the zone system
                if(zone >= 0)
                {
                    float auto, transit, totalTrips;
                    reader.Get(out auto, 1);
                    reader.Get(out transit, 2);
                    reader.Get(out totalTrips, 3);
                    AutoProbabilities[zone] = auto;
                    TransitProbabilities[zone] = transit;
                    TotalTrips[zone] = totalTrips;
                }
            }
        }
    }
}
/// <summary>
/// Aggregates the zonal mode split file into a
/// [mode][originRegion][destinationRegion] matrix of expanded persons, where
/// a zone's region is its planning district's flat index.
/// </summary>
/// <param name="modeNames">The mode names used to resolve each row's mode index.</param>
/// <param name="zoneSystem">The zone system used to look up planning districts.</param>
/// <param name="regions">The sparse set of regions (planning districts).</param>
/// <returns>Expanded persons by mode, origin region and destination region.</returns>
private float[][][] BuildData(string[] modeNames, SparseArray<IZone> zoneSystem, SparseArray<int> regions)
{
    // FIX: removed the unused 'zones' local (GetFlatData result was never read)
    var modes = Root.AllModes.ToArray();
    var data = new float[modes.Length][][];
    var numberOfRegions = regions.GetFlatData().Length;
    for(int i = 0; i < data.Length; i++)
    {
        var row = data[i] = new float[numberOfRegions][];
        for(int j = 0; j < row.Length; j++)
        {
            row[j] = new float[numberOfRegions];
        }
    }
    using (CsvReader reader = new CsvReader(ZonalModeSplitFile))
    {
        // burn header
        reader.LoadLine();
        int columns;
        while(reader.LoadLine(out columns))
        {
            // ignore lines without the right number of columns
            if(columns == 4)
            {
                string modeName;
                int originZone, destinationZone;
                float expandedPersons;
                reader.Get(out modeName, 0);
                reader.Get(out originZone, 1);
                reader.Get(out destinationZone, 2);
                reader.Get(out expandedPersons, 3);
                var originRegion = regions.GetFlatIndex(zoneSystem[originZone].PlanningDistrict);
                var destinationRegion = regions.GetFlatIndex(zoneSystem[destinationZone].PlanningDistrict);
                data[ModeIndex(modeName, modeNames)][originRegion][destinationRegion] += expandedPersons;
            }
        }
    }
    return data;
}