/// <summary>
/// Runs the gravity model's balancing loop: repeatedly distributes productions across
/// destinations (weighted by friction and the current attraction-star factors) until the
/// column totals match the attractions within Epsilon, or MaxIterations is reached.
/// </summary>
/// <param name="O">Productions by zone.</param>
/// <param name="D">Attractions by zone.</param>
/// <param name="validIndexes">The flat indexes to process; its length sizes the working arrays.</param>
/// <param name="attractionStar">Optional warm-start balancing factors; a fresh array is created when null.</param>
/// <returns>The balanced origin-destination flow matrix.</returns>
public SparseTwinIndex<float> ProcessFlow(SparseArray<float> O, SparseArray<float> D, int[] validIndexes, SparseArray<float> attractionStar = null)
{
    int length = validIndexes.Length;
    Productions = O;
    Attractions = D;
    if (attractionStar == null)
    {
        AttractionsStar = D.CreateSimilarArray<float>();
    }
    else
    {
        AttractionsStar = attractionStar;
    }
    FlowMatrix = Productions.CreateSquareTwinArray<float>();
    if (Friction == null)
    {
        InitializeFriction(length);
    }
    var flatAttractionStar = AttractionsStar.GetFlatData();
    float[] oldTotal = new float[flatAttractionStar.Length];
    var flatAttractions = Attractions.GetFlatData();
    // Start with neutral balancing factors and remember the original attractions.
    // NOTE(review): this indexes the flat arrays by 0..length-1, not by validIndexes[i] —
    // assumes the valid indexes are exactly the leading flat positions; confirm with callers.
    for (int i = 0; i < length; i++)
    {
        flatAttractionStar[i] = 1f;
        oldTotal[i] = flatAttractions[i];
    }
    int iteration = 0;
    float[] columnTotals = new float[length];
    var balanced = false;
    do
    {
        if (ProgressCallback != null)
        {
            // this doesn't go to 100%, but that is alright since when we end, the progress
            // of the calling model should assume we hit 100%
            ProgressCallback(iteration / (float)MaxIterations);
        }
        Array.Clear(columnTotals, 0, columnTotals.Length);
        // use the SIMD path when hardware acceleration is available, otherwise the parallel scalar fallback
        if (Vector.IsHardwareAccelerated)
        {
            VectorProcessFlow(columnTotals, FlowMatrix.GetFlatData());
        }
        else
        {
            ProcessFlow(columnTotals);
        }
        // updates the attraction-star factors and reports whether we converged
        balanced = Balance(columnTotals, oldTotal);
    } while ((++iteration) < MaxIterations && !balanced);
    if (ProgressCallback != null)
    {
        ProgressCallback(1f);
    }
    return (FlowMatrix);
}
/// <summary>
/// Generates the production and attraction vectors for this category, applying
/// work-at-home, intrazonal, and age-category adjustments, then writes the
/// generation and attraction report files.
/// </summary>
/// <param name="production">The production vector to fill (flat zone order).</param>
/// <param name="attractions">The attraction vector to fill (flat zone order).</param>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        this.LoadExternalWorkerRates.LoadData();
        this.LoadWorkAtHomeRates.LoadData();
        this.LoadExternalJobsRates.LoadData();
        this.ExternalRates = this.LoadExternalWorkerRates.GiveData();
        this.WorkAtHomeRates = this.LoadWorkAtHomeRates.GiveData();
        // BUGFIX: this previously re-assigned ExternalRates, clobbering the external-worker
        // rates and leaving ExternalJobs unset (it is nulled in the unload step below).
        this.ExternalJobs = this.LoadExternalJobsRates.GiveData();
    }
    var flatProduction = production.GetFlatData();
    var flatWah = new float[flatProduction.Length];
    var flatIntraZonal = new float[flatProduction.Length];
    float elfgta = CalculateElfGTA();
    var totalProduction = ComputeProduction(flatProduction, flatWah, flatIntraZonal);
    var totalAttraction = ComputeAttraction(attractions.GetFlatData());
    ApplyWahAndIntrazonal(production.GetFlatData(), attractions.GetFlatData(), flatWah, flatIntraZonal, totalProduction, totalAttraction, elfgta);
    ApplyAgeCategoryFactor(production.GetFlatData(), attractions.GetFlatData());
    Normalize(production.GetFlatData(), attractions.GetFlatData());
    WriteGenerationFile(flatProduction.Sum(), attractions.GetFlatData().Sum());
    WriteAttractionFile(production, attractions);
    if (LoadData)
    {
        // release the rate data sources and drop our references
        this.LoadExternalWorkerRates.UnloadData();
        this.LoadWorkAtHomeRates.UnloadData();
        this.LoadExternalJobsRates.UnloadData();
        WorkAtHomeRates = null;
        ExternalRates = null;
        ExternalJobs = null;
    }
}
/// <summary>
/// After an iteration completes, redistributes households across zones to match the
/// forecast population: pass 1 keeps residences that stay, pass 2 fills the remaining
/// zonal differences, then the selections are saved.
/// </summary>
/// <param name="iteration">The iteration that just finished.</param>
public void IterationFinished(int iteration)
{
    var flatRegion = HouseholdsByRegion.GetFlatData();
    var zoneArray = Root.ZoneSystem.ZoneArray;
    var zones = zoneArray.GetFlatData();
    RandomizeHouseholdOrder();
    // population per zone in the base scenario, used to compute the deltas to apply
    var basePopulation = Root.ZoneSystem.ZoneArray.GetFlatData().Select(z => z.Population).ToArray();
    int[] zonalDifferences = BuildScenarioDifferencesByZone(basePopulation);
    // one result list per region (presumably zone/household index pairs — confirm against Save)
    List<KeyValuePair<int, int>>[] results = new List<KeyValuePair<int, int>>[flatRegion.Length];
    for (int i = 0; i < results.Length; i++)
    {
        results[i] = new List<KeyValuePair<int, int>>();
    }
    // make a copy of the households by region so we can subtract out the households that have already been used
    // we can't just edit the households by region because we need them for indexing
    List<ITashaHousehold>[] remainingHouseholds = new List<ITashaHousehold>[HouseholdsByRegion.Count];
    List<int>[] lookupsForRegion = new List<int>[remainingHouseholds.Length];
    for (int i = 0; i < remainingHouseholds.Length; i++)
    {
        remainingHouseholds[i] = new List<ITashaHousehold>();
        lookupsForRegion[i] = new List<int>();
    }
    //Step 1 fill up the zones with residences that will stay
    Pass1(zoneArray, zones, basePopulation, zonalDifferences, results, remainingHouseholds, lookupsForRegion);
    // Step 2: fill the remaining zonal differences from the leftover households
    Pass2(zones, zonalDifferences, results, remainingHouseholds, lookupsForRegion);
    Console.WriteLine("Saving results");
    Save(results, flatRegion);
}
/// <summary>
/// Generates productions and attractions for this worker category, removing the
/// work-at-home component from the attractions, and writes the report files.
/// </summary>
/// <param name="production">The production vector to fill (flat zone order).</param>
/// <param name="attractions">The attraction vector to fill (flat zone order).</param>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        LoadExternalWorkerRates.LoadData();
        LoadWorkAtHomeRates.LoadData();
        LoadExternalJobsRates.LoadData();
        ExternalRates = LoadExternalWorkerRates.GiveData();
        WorkAtHomeRates = LoadWorkAtHomeRates.GiveData();
        // BUGFIX: this previously re-assigned ExternalRates, clobbering the external-worker
        // rates and leaving ExternalJobs unset (it is nulled in the unload step below).
        ExternalJobs = LoadExternalJobsRates.GiveData();
    }
    var flatProduction = production.GetFlatData();
    var flatWah = new float[flatProduction.Length];
    var totalProduction = ComputeProduction(flatProduction, flatWah);
    var totalAttraction = ComputeAttraction(attractions.GetFlatData());
    Normalize(production.GetFlatData(), attractions.GetFlatData(), totalProduction, totalAttraction);
    // work-at-home trips never appear as attractions at a workplace zone
    totalAttraction = RemoveWAHFromAttraction(attractions.GetFlatData(), flatWah);
    StoreProductionData(production);
    WriteGenerationFile(totalProduction, totalAttraction);
    WriteAttractionFile(attractions);
    if (LoadData)
    {
        LoadExternalWorkerRates.UnloadData();
        LoadWorkAtHomeRates.UnloadData();
        LoadExternalJobsRates.UnloadData();
        WorkAtHomeRates = null;
        ExternalRates = null;
        ExternalJobs = null;
    }
}
/// <summary>
/// Generates productions and attractions for this worker category, removing the
/// work-at-home component from the attractions, and writes the report files.
/// </summary>
/// <param name="production">The production vector to fill (flat zone order).</param>
/// <param name="attractions">The attraction vector to fill (flat zone order).</param>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        LoadExternalWorkerRates.LoadData();
        LoadWorkAtHomeRates.LoadData();
        LoadExternalJobsRates.LoadData();
        ExternalRates = LoadExternalWorkerRates.GiveData();
        WorkAtHomeRates = LoadWorkAtHomeRates.GiveData();
        // BUGFIX: this previously re-assigned ExternalRates, clobbering the external-worker
        // rates and leaving ExternalJobs unset (it is nulled in the unload step below).
        ExternalJobs = LoadExternalJobsRates.GiveData();
    }
    var flatProduction = production.GetFlatData();
    var flatWah = new float[flatProduction.Length];
    var totalProduction = ComputeProduction(flatProduction, flatWah);
    var totalAttraction = ComputeAttraction(attractions.GetFlatData());
    Normalize(production.GetFlatData(), attractions.GetFlatData(), totalProduction, totalAttraction);
    // work-at-home trips never appear as attractions at a workplace zone
    totalAttraction = RemoveWAHFromAttraction(attractions.GetFlatData(), flatWah);
    StoreProductionData(production);
    WriteGenerationFile(totalProduction, totalAttraction);
    WriteAttractionFile(attractions);
    if (LoadData)
    {
        LoadExternalWorkerRates.UnloadData();
        LoadWorkAtHomeRates.UnloadData();
        LoadExternalJobsRates.UnloadData();
        WorkAtHomeRates = null;
        ExternalRates = null;
        ExternalJobs = null;
    }
}
/// <summary>
/// Performs one balancing step: folds the ratio of column totals to attractions into
/// the attraction-star factors and reports whether every ratio is within Epsilon of 1.
/// </summary>
/// <param name="columnTotals">The summed flows per destination; overwritten with the balancing ratios.</param>
/// <returns>True when the model has converged.</returns>
private bool Balance(float[] columnTotals)
{
    var attractions = Attractions.GetFlatData();
    var attractionStar = AttractionsStar.GetFlatData();
    float epsilon = Epsilon;
    // ratio[i] = columnTotals[i] / attractions[i], computed in place
    VectorHelper.Divide(columnTotals, 0, attractions, 0, columnTotals, 0, columnTotals.Length);
    // fold the ratios into the running balancing factors
    VectorHelper.Multiply(attractionStar, 0, attractionStar, 0, columnTotals, 0, attractionStar.Length);
    // zero attractions yield NaN/Inf factors; reset those back to neutral
    VectorHelper.ReplaceIfNotFinite(attractionStar, 0, 1.0f, attractionStar.Length);
    // converged when every ratio is within epsilon of 1
    return VectorHelper.AreBoundedBy(columnTotals, 0, 1.0f, epsilon, columnTotals.Length);
}
/// <summary>
/// Compute the mode splits for each zone.
/// </summary>
/// <returns>The results from the mode choice model, one entry per flat zone index.</returns>
private ModeSplitUtilities[] ComputeModeSplits()
{
    var numberOfZones = ZoneSystem.GetFlatData().Length;
    var utilities = new ModeSplitUtilities[numberOfZones];
    for (int zoneIndex = 0; zoneIndex < numberOfZones; zoneIndex++)
    {
        ComputeModeSplitForZone(zoneIndex, utilities);
    }
    return utilities;
}
/// <summary>
/// CPU implementation of the doubly-constrained gravity model over all multi-blend sets.
/// Pre-loads every generation, computes blended frictions per set, optionally culls small
/// values, and yields one balanced flow matrix per multi-set.
/// </summary>
private IEnumerable<SparseTwinIndex<float>> CPUDoublyConstrained(IZone[] zones, IEnumerator<SparseArray<float>> ep, IEnumerator<SparseArray<float>> ea, IEnumerator<IDemographicCategory> ec)
{
    float completed = 0f;
    var frictionSparse = this.Root.ZoneSystem.ZoneArray.CreateSquareTwinArray<float>();
    var productions = new List<SparseArray<float>>();
    var attractions = new List<SparseArray<float>>();
    var cats = new List<IDemographicCategory>();
    // We need to pre load all of our generations in order to handle blending properly.
    // Non-short-circuiting '&' keeps all three enumerators advancing in lock-step.
    while (ep.MoveNext() & ea.MoveNext() & ec.MoveNext())
    {
        productions.Add(ep.Current);
        attractions.Add(ea.Current);
        cats.Add(ec.Current);
    }
    // NOTE(review): 'ret' appears to be unused; the yielded matrices come from GravityModel below.
    var ret = this.Root.ZoneSystem.ZoneArray.CreateSquareTwinArray<float>();
    SparseArray<float> production = this.Root.ZoneSystem.ZoneArray.CreateSimilarArray<float>();
    SparseArray<float> attraction = this.Root.ZoneSystem.ZoneArray.CreateSimilarArray<float>();
    this.CurrentMultiSetIndex = -1;
    foreach (var multiset in this.MultiBlendSets)
    {
        this.CurrentMultiSetIndex++;
        var numberOfSubsets = multiset.Subsets.Count;
        var productionSet = new float[numberOfSubsets][][];
        var attractionSet = new float[numberOfSubsets][][];
        var multiCatSet = new IDemographicCategory[numberOfSubsets][];
        // gather the production/attraction/category data for each subset of this multi-set
        SetupFrictionData(productions, attractions, cats, multiset, productionSet, attractionSet, multiCatSet);
        this.ComputeFriction(zones, multiCatSet, productionSet, attractionSet, frictionSparse.GetFlatData(), production.GetFlatData(), attraction.GetFlatData());
        string balanceFileName;
        // warm-start the balancing factors from a previous run's file when available
        SparseArray<float> balanceFactors = GetWarmBalancingFactors(attraction, out balanceFileName);
        if (this.CullSmallValues)
        {
            // run a throw-away gravity model to identify and cull small flows from the friction
            var tempValues = new GravityModel(frictionSparse, null, this.Epsilon, this.MaxIterations)
                .ProcessFlow(production, attraction, production.ValidIndexArray(), balanceFactors);
            this.Cull(tempValues, frictionSparse.GetFlatData(), production.GetFlatData(), attraction.GetFlatData());
            if (!String.IsNullOrWhiteSpace(this.SaveFrictionFileName))
            {
                this.SaveFriction(frictionSparse.GetFlatData());
            }
        }
        // the progress callback blends this set's progress into the module's overall progress
        yield return (new GravityModel(frictionSparse, (p => this.Progress = (p * (1f / (this.MultiBlendSets.Count)) + (completed / (this.MultiBlendSets.Count)))), this.Epsilon, this.MaxIterations)
            .ProcessFlow(production, attraction, production.ValidIndexArray(), balanceFactors));
        if (balanceFileName != null)
        {
            SaveBalanceFactors(balanceFileName, balanceFactors);
        }
        completed += 1f;
    }
}
/// <summary>
/// Scalar fallback for one balancing pass: distributes each origin's production across
/// destinations in proportion to friction * attraction * attractionStar, writing the
/// flow matrix and accumulating the per-destination column totals.
/// </summary>
/// <param name="columnTotals">Receives the summed flows per destination (cleared by the caller).</param>
private void ProcessFlow(float[] columnTotals)
{
    Parallel.For(0, Productions.GetFlatData().Length,
        new ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount },
        // each worker accumulates into its own local column-total buffer
        () => new float[columnTotals.Length],
        (int flatOrigin, ParallelLoopState state, float[] localTotals) =>
        {
            float sumAF = 0;
            var flatProductions = Productions.GetFlatData();
            var flatFriction = Friction.GetFlatData();
            var flatAStar = AttractionsStar.GetFlatData();
            var flatAttractions = Attractions.GetFlatData();
            // NOTE(review): 'length' is unused below
            var length = flatFriction.Length;
            var flatFrictionRow = flatFriction[flatOrigin];
            // check to see if there is no production, if not skip this
            if (flatProductions[flatOrigin] > 0)
            {
                // if there is production continue on
                for (int i = 0; i < flatFrictionRow.Length; i++)
                {
                    sumAF += flatFrictionRow[i] * (flatAttractions[i] * flatAStar[i]);
                }
                // convert the row sum into the production scaling factor for this origin
                sumAF = (1 / sumAF) * flatProductions[flatOrigin];
                if (float.IsInfinity(sumAF) | float.IsNaN(sumAF))
                {
                    // this needs to be 0f, otherwise we will be making the attractions have to be balanced higher
                    sumAF = 0f;
                }
                var flatFlowsRow = FlowMatrix.GetFlatData()[flatOrigin];
                for (int i = 0; i < flatFlowsRow.Length; i++)
                {
                    var temp = (flatFrictionRow[i] * (sumAF * flatAttractions[i] * flatAStar[i]));
                    // guard each cell against non-finite values from extreme frictions
                    temp = float.IsInfinity(temp) | float.IsNaN(temp) ? 0 : temp;
                    localTotals[i] += temp;
                    flatFlowsRow[i] = temp;
                }
            }
            return (localTotals);
        },
        (float[] localTotals) =>
        {
            // merge this worker's totals into the shared result under a lock
            lock (columnTotals)
            {
                for (int i = 0; i < localTotals.Length; i++)
                {
                    columnTotals[i] += localTotals[i];
                }
            }
        });
}
/// <summary>
/// Performs one balancing step: folds the ratio of column totals to attractions into
/// the attraction-star factors and reports whether every ratio is within Epsilon of 1.
/// </summary>
/// <param name="columnTotals">The summed flows per destination; overwritten with the balancing ratios.</param>
/// <param name="oldTotal">Unused; retained so the caller's signature keeps working.</param>
/// <returns>True when the model has converged.</returns>
private bool Balance(float[] columnTotals, float[] oldTotal)
{
    // Removed the unused flatFlows/length locals and the redundant 'balanced' assignment.
    var flatAttractions = Attractions.GetFlatData();
    var flatAttractionStar = AttractionsStar.GetFlatData();
    float ep = (float)Epsilon;
    // ratio[i] = columnTotals[i] / attractions[i], computed in place
    VectorHelper.Divide(columnTotals, 0, flatAttractions, 0, columnTotals, 0, columnTotals.Length);
    // fold the ratios into the running balancing factors
    VectorHelper.Multiply(flatAttractionStar, 0, flatAttractionStar, 0, columnTotals, 0, flatAttractionStar.Length);
    // zero attractions yield NaN/Inf factors; reset those back to neutral
    VectorHelper.ReplaceIfNotFinite(flatAttractionStar, 0, 1.0f, flatAttractionStar.Length);
    // converged when every ratio is within epsilon of 1
    return VectorHelper.AreBoundedBy(columnTotals, 0, 1.0f, ep, columnTotals.Length);
}
/// <summary>
/// Writes total employment per zone (summed over all employment/occupation categories)
/// to the TotalEmploymentByZone CSV, skipping zones in external planning districts.
/// Does nothing when no output file is configured.
/// </summary>
private void WriteTotalEmployment()
{
    if (TotalEmploymentByZone == null)
    {
        // the report is optional
        return;
    }
    var zones = _zones.GetFlatData();
    using (var writer = new StreamWriter(TotalEmploymentByZone))
    {
        writer.WriteLine("Zone,Employment");
        for (int zoneIndex = 0; zoneIndex < zones.Length; zoneIndex++)
        {
            // external planning districts are excluded from the report
            if (ExternalPDs.Contains(zones[zoneIndex].PlanningDistrict))
            {
                continue;
            }
            writer.Write(zones[zoneIndex].ZoneNumber);
            writer.Write(',');
            // collapse the [employment][occupation] dimensions for this zone
            var totalEmployment = 0.0f;
            foreach (var byOccupation in _zonalEmployment)
            {
                foreach (var byZone in byOccupation)
                {
                    totalEmployment += byZone[zoneIndex];
                }
            }
            writer.WriteLine(totalEmployment);
        }
    }
}
/// <summary>
/// Gets warm-start balancing factors for the current multi-set: loaded from disk when a
/// previously saved file exists, otherwise a fresh vector of ones.
/// </summary>
/// <param name="attraction">The attraction vector used to size a fresh factor array.</param>
/// <param name="balanceFileName">The file the factors should be saved back to, or null when not configured.</param>
/// <returns>The balancing factors, or null when no balance-factor file name is configured.</returns>
private SparseArray<float> GetWarmBalancingFactors(SparseArray<float> attraction, out string balanceFileName)
{
    if (!BalanceFactors.ContainsFileName())
    {
        balanceFileName = null;
        return null;
    }
    // each multi-set gets its own factor file
    balanceFileName = BalanceFactors.GetFileName() + CurrentMultiSetIndex + ".bin";
    if (File.Exists(balanceFileName))
    {
        return LoadBalanceFactors(balanceFileName);
    }
    // no warm start available: begin with neutral factors of 1
    var factors = attraction.CreateSimilarArray<float>();
    var flatFactors = factors.GetFlatData();
    for (int i = 0; i < flatFactors.Length; i++)
    {
        flatFactors[i] = 1.0f;
    }
    return factors;
}
/// <summary>
/// Calculates the systematic utility of using this access station between the given zones.
/// </summary>
/// <param name="origin">The trip origin zone.</param>
/// <param name="destination">The trip destination zone.</param>
/// <param name="time">The time of day used to look up travel times and costs.</param>
/// <returns>The utility, or float.NaN when the station-to-destination time exceeds MaxAccessToDestinationTime.</returns>
public float CalculateV(IZone origin, IZone destination, Time time)
{
    CheckInterchangeZone();
    var zoneArray = Root.ZoneSystem.ZoneArray;
    var originIndex = zoneArray.GetFlatIndex(origin.ZoneNumber);
    var destinationIndex = zoneArray.GetFlatIndex(destination.ZoneNumber);
    var interchangeIndex = zoneArray.GetFlatIndex(InterchangeZone.ZoneNumber);
    // Make sure that this is a valid trip first: reject when the egress leg takes too long
    var egressMinutes = Second.InVehicleTravelTime(interchangeIndex, destinationIndex, time).ToMinutes();
    if (egressMinutes > MaxAccessToDestinationTime)
    {
        return float.NaN;
    }
    var utility = LogParkingFactor * LogOfParking;
    if (ClosestZone.GetFlatData()[originIndex])
    {
        utility += Closest;
    }
    // Cost of accessing the station: in-vehicle time plus monetary cost (including the TTC fare)
    utility += AccessInVehicleTravelTime * First.TravelTime(originIndex, interchangeIndex, time).ToMinutes()
        + (AccessCost * (First.TravelCost(originIndex, interchangeIndex, time) + FareTTC));
    // Station to Destination time
    utility += InVehicleTravelTime * egressMinutes;
    // Walk Time at the destination end
    utility += WalkTime * Second.WalkTime(interchangeIndex, destinationIndex, time).ToMinutes();
    return utility;
}
/// <summary>
/// Builds the zone system from the network nodes flagged as centroids; each centroid's
/// node number becomes its zone number and its coordinates are copied over.
/// </summary>
/// <param name="nodes">The sparse array of network nodes.</param>
/// <returns>A sparse array of zones indexed by centroid number.</returns>
private static SparseArray<IZone> InitializeZones(SparseArray<Node> nodes)
{
    var flatNodes = nodes.GetFlatData();
    // collect the node numbers of every centroid; those become the sparse zone indexes
    var centroidNumbers = new List<int>();
    foreach (var node in flatNodes)
    {
        if (node.IsCentroid)
        {
            centroidNumbers.Add(node.Number);
        }
    }
    var sparseIndexes = centroidNumbers.ToArray();
    var zones = new IZone[sparseIndexes.Length];
    for (int i = 0; i < zones.Length; i++)
    {
        var node = nodes[sparseIndexes[i]];
        // map the centroid into the sparse zone space, carrying its coordinates
        var zone = new HOTZone();
        zone.ZoneNumber = (short)sparseIndexes[i];
        zone.X = node.X;
        zone.Y = node.Y;
        zones[i] = zone;
    }
    return SparseArray<IZone>.CreateSparseArray(sparseIndexes, zones);
}
/// <summary>
/// Saves the station-by-origin-by-destination trip results to the SaveTo CSV when the
/// iteration completes. Only cells with a positive trip count are written.
/// </summary>
/// <param name="iteration">The iteration that just finished (unused).</param>
/// <param name="totalIterations">The total number of iterations (unused).</param>
public void IterationFinished(int iteration, int totalIterations)
{
    var results = Results;
    // pre-render the labels once instead of calling ToString per cell
    var zoneNames = Zones.GetFlatData().Select(z => z.ZoneNumber.ToString()).ToArray();
    var stationNames = StationIndex.Select(z => z.ToString()).ToArray();
    using (var writer = new StreamWriter(SaveTo))
    {
        writer.WriteLine("Station,Origin,Destination,Trips");
        for (int station = 0; station < results.Length; station++)
        {
            var byOrigin = results[station];
            for (int origin = 0; origin < byOrigin.Length; origin++)
            {
                var byDestination = byOrigin[origin];
                for (int destination = 0; destination < byDestination.Length; destination++)
                {
                    var trips = byDestination[destination];
                    if (trips > 0.0f)
                    {
                        writer.Write(stationNames[station]);
                        writer.Write(',');
                        writer.Write(zoneNames[origin]);
                        writer.Write(',');
                        writer.Write(zoneNames[destination]);
                        writer.Write(',');
                        writer.WriteLine(trips);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Generates the production vector for this demographic category from the daily and
/// time-of-day rate tables. No attraction component is produced; the distribution
/// handles that side.
/// </summary>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        // lazily pull in the rate tables only when they have not been provided yet
        if (DailyRates == null)
        {
            LoadDailyRates.LoadData();
            DailyRates = LoadDailyRates.GiveData();
        }
        if (TimeOfDayRates == null)
        {
            LoadTimeOfDayRates.LoadData();
            TimeOfDayRates = LoadTimeOfDayRates.GiveData();
        }
    }
    var flatProduction = production.GetFlatData();
    // Compute the Production
    ComputeProduction(flatProduction, flatProduction.Length);
    float totalProduction = flatProduction.Sum();
    WriteGenerationCSV(totalProduction);
    // The PoRPoS Model does NOT include having an attraction component.
    // The distribution will handle this case.
    if (LoadData)
    {
        // NOTE(review): unlike the sibling Generate implementations, the rate data sources
        // are not UnloadData()'d here — only the local references are dropped. Confirm intended.
        DailyRates = null;
        TimeOfDayRates = null;
    }
}
/// <summary>
/// Lazily initializes the interchange-zone cache using double-checked locking:
/// resolves the configured station zone and precomputes, for every zone, whether
/// this station is the closest one.
/// </summary>
/// <exception cref="XTMFRuntimeException">Thrown when the configured StationZone does not exist.</exception>
private void CheckInterchangeZone()
{
    if (!CacheLoaded)
    {
        // NOTE(review): locking on 'this' is discouraged; switching to a private lock
        // object would require a field change outside this method.
        lock (this)
        {
            Thread.MemoryBarrier();
            if (!CacheLoaded)
            {
                var zones = Root.ZoneSystem.ZoneArray;
                var distances = Root.ZoneSystem.Distances;
                var zone = zones[StationZone];
                // BUGFIX: the error message previously never closed the quote around the mode name
                InterchangeZone = zone ?? throw new XTMFRuntimeException(this, "The zone " + StationZone
                    + " does not exist! Please check the mode '" + ModeName + "'!");
                ClosestZone = zones.CreateSimilarArray<bool>();
                var flatZones = zones.GetFlatData();
                var flatClosest = ClosestZone.GetFlatData();
                for (int i = 0; i < flatZones.Length; i++)
                {
                    flatClosest[i] = AreWeClosest(flatZones[i], zones, distances);
                }
                // publish all writes before other threads can observe CacheLoaded == true
                CacheLoaded = true;
                Thread.MemoryBarrier();
            }
        }
    }
}
/// <summary>
/// Applies the natural logarithm element-wise to the first computation result.
/// Scalars produce a scalar; vector and OD-matrix operands are processed with
/// VectorHelper, writing in place when the operand is an accumulator.
/// </summary>
/// <param name="values">The operand list; only values[0] is used.</param>
/// <returns>A new accumulator result holding the logged data.</returns>
private ComputationResult Log(ComputationResult[] values)
{
    var operand = values[0];
    if (operand.IsValue)
    {
        return new ComputationResult((float)Math.Log(operand.LiteralValue));
    }
    if (operand.IsVectorResult)
    {
        // reuse the operand's storage when we are allowed to accumulate into it
        var destination = operand.Accumulator ? operand.VectorData : operand.VectorData.CreateSimilarArray<float>();
        var sourceFlat = operand.VectorData.GetFlatData();
        VectorHelper.Log(destination.GetFlatData(), 0, sourceFlat, 0, sourceFlat.Length);
        return new ComputationResult(destination, true);
    }
    // OD (two-dimensional) case: log each row in parallel
    var odDestination = operand.Accumulator ? operand.OdData : operand.OdData.CreateSimilarArray<float>();
    var odSource = operand.OdData.GetFlatData();
    var odFlat = odDestination.GetFlatData();
    System.Threading.Tasks.Parallel.For(0, odFlat.Length, (int i) =>
    {
        VectorHelper.Log(odFlat[i], 0, odSource[i], 0, odSource[i].Length);
    });
    return new ComputationResult(odDestination, true);
}
// private SparseArray<EgressZoneChoice> EgressChoiceCache;
/// <summary>
/// Finds the best egress station for the given destination zone and records the choice
/// in EgressUtils. When no station offers a feasible egress trip, a choice with a null
/// egress zone and NaN utility is stored instead.
/// </summary>
/// <param name="flatDest">The flat index of the destination zone.</param>
/// <param name="zones">The zone system used to resolve the chosen station's zone.</param>
private void EgressStation(int flatDest, SparseArray<IZone> zones)
{
    float bestTime = float.MaxValue;
    // BUGFIX: this was initialized to 'new Station()', which made the null check below
    // unreachable and could select a default station when no feasible egress existed.
    Station bestEgress = null;
    float travelTime;
    foreach (var station in this.Stations)
    {
        // EgressTravelTime reports true only when this station beats the current best time
        if (EgressTravelTime(station.zoneNumber, flatDest, AM, bestTime, out travelTime))
        {
            bestTime = travelTime;
            bestEgress = station;
        }
    }
    if (bestEgress == null)
    {
        // no feasible egress station for this destination
        EgressUtils.TryAdd(flatDest, new EgressZoneChoice() { egressZone = null, EgressUtility = float.NaN });
    }
    else
    {
        EgressUtils.TryAdd(flatDest, new EgressZoneChoice()
        {
            egressZone = zones.GetFlatData()[bestEgress.zoneNumber],
            EgressUtility = CalculateEgressUtility(bestEgress.zoneNumber, flatDest, AM)
        });
    }
}
/// <summary>
/// Generates production and attraction vectors for this demographic category from the
/// daily and time-of-day rate tables. The attraction is intentionally left un-normalized.
/// </summary>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        // only load rate tables that have not already been supplied
        if (DailyRates == null)
        {
            LoadDailyRates.LoadData();
            DailyRates = LoadDailyRates.GiveData();
        }
        if (TimeOfDayRates == null)
        {
            LoadTimeOfDayRates.LoadData();
            TimeOfDayRates = LoadTimeOfDayRates.GiveData();
        }
    }
    var flatProduction = production.GetFlatData();
    var flatAttraction = attractions.GetFlatData();
    // Compute the Production and Attractions; we do not normalize the attraction.
    ComputeProduction(flatProduction, flatAttraction, flatAttraction.Length);
    if (LoadData)
    {
        // release the rate tables so a later call reloads fresh data
        LoadDailyRates.UnloadData();
        LoadTimeOfDayRates.UnloadData();
        DailyRates = null;
        TimeOfDayRates = null;
    }
}
/// <summary>
/// Writes a square matrix to CSV with a header row and row labels drawn from the aggregation.
/// </summary>
/// <typeparam name="T">The aggregation element type.</typeparam>
/// <param name="aggregation">Provides the label for each row/column.</param>
/// <param name="data">The matrix to write, one inner array per row.</param>
/// <param name="fileName">The CSV file to create.</param>
/// <param name="getValue">Extracts the numeric label from an aggregation element.</param>
private void WriteOut<T>(SparseArray<T> aggregation, float[][] data, string fileName, Func<T, int> getValue)
{
    var labels = aggregation.GetFlatData();
    using (StreamWriter writer = new StreamWriter(fileName))
    {
        // write the top hat: destination labels
        writer.Write("Origin\\Destination");
        foreach (var element in labels)
        {
            writer.Write(',');
            writer.Write(getValue(element));
        }
        writer.WriteLine();
        // one row per origin, prefixed with its label
        for (int row = 0; row < labels.Length; row++)
        {
            writer.Write(getValue(labels[row]));
            var rowData = data[row];
            for (int col = 0; col < rowData.Length; col++)
            {
                writer.Write(',');
                writer.Write(rowData[col]);
            }
            writer.WriteLine();
        }
    }
}
/// <summary>
/// Loads the zonal mode-split file and aggregates expanded persons into a
/// [mode][originRegion][destinationRegion] matrix.
/// </summary>
/// <param name="modeNames">The mode names used to map a record to a mode index.</param>
/// <param name="zoneSystem">The zone system used to resolve zone region numbers.</param>
/// <param name="regions">The sparse region index used to map region numbers to flat indexes.</param>
private float[][][] BuildData(string[] modeNames, SparseArray<IZone> zoneSystem, SparseArray<int> regions)
{
    var numberOfModes = Root.AllModes.ToArray().Length;
    var numberOfRegions = regions.GetFlatData().Length;
    var data = new float[numberOfModes][][];
    for (int mode = 0; mode < data.Length; mode++)
    {
        data[mode] = new float[numberOfRegions][];
        for (int region = 0; region < numberOfRegions; region++)
        {
            data[mode][region] = new float[numberOfRegions];
        }
    }
    using (CsvReader reader = new CsvReader(ZonalModeSplitFile))
    {
        // burn header
        reader.LoadLine();
        while (reader.LoadLine(out int columns))
        {
            // ignore lines without the right number of columns
            if (columns != 4)
            {
                continue;
            }
            reader.Get(out string modeName, 0);
            reader.Get(out int originZone, 1);
            reader.Get(out int destinationZone, 2);
            reader.Get(out float expandedPersons, 3);
            // accumulate at the region level rather than the zone level
            var originRegion = regions.GetFlatIndex(zoneSystem[originZone].RegionNumber);
            var destinationRegion = regions.GetFlatIndex(zoneSystem[destinationZone].RegionNumber);
            data[ModeIndex(modeName, modeNames)][originRegion][destinationRegion] += expandedPersons;
        }
    }
    return data;
}
/// <summary>
/// Writes each zone's retail/other/work activity levels to a temporary CSV, converts it
/// into the binary sparse-zone format at fileName, then removes the temporary file.
/// </summary>
/// <param name="fileName">The output file for the converted activity data.</param>
/// <param name="zoneArray">The zone system providing the activity levels.</param>
private static void GenerateActivityLevels(string fileName, SparseArray<IZone> zoneArray)
{
    var zones = zoneArray.GetFlatData();
    var tempCsv = Path.GetTempFileName();
    using (StreamWriter writer = new StreamWriter(tempCsv))
    {
        writer.WriteLine("Zone,Retail Level,Other Level,Work Level");
        foreach (var zone in zones)
        {
            writer.Write(zone.ZoneNumber);
            writer.Write(',');
            writer.Write(zone.RetailActivityLevel);
            writer.Write(',');
            writer.Write(zone.OtherActivityLevel);
            writer.Write(',');
            writer.WriteLine(zone.WorkActivityLevel);
        }
    }
    // three data columns, sized to cover the highest zone number
    SparseZoneCreator creator = new SparseZoneCreator(zones.Last().ZoneNumber + 1, 3);
    creator.LoadCSV(tempCsv, true);
    creator.Save(fileName);
    File.Delete(tempCsv);
}
/// <summary>
/// Sums the linkage data across all categories and saves the resulting zone-by-zone
/// matrix to the SaveLinkages file. Does nothing when no file is configured.
/// </summary>
/// <param name="data">Linkage data laid out as [category][origin][destination].</param>
private void SaveLinkagesToFile(float[][][] data)
{
    if (SaveLinkages == null)
    {
        return;
    }
    var zones = Root.ZoneSystem.ZoneArray.GetFlatData();
    var combined = new float[zones.Length][];
    for (int origin = 0; origin < combined.Length; origin++)
    {
        var row = combined[origin] = new float[zones.Length];
        for (int destination = 0; destination < row.Length; destination++)
        {
            // collapse the category dimension
            float total = 0.0f;
            for (int category = 0; category < data.Length; category++)
            {
                total += data[category][origin][destination];
            }
            row[destination] = total;
        }
    }
    SaveData.SaveMatrix(zones, combined, SaveLinkages);
}
/// <summary>
/// Expands the zonal population for an occupation into workers by worker category,
/// returning a flat array laid out as [category * zones + zone]. The result is cached
/// when KeepLocalData is set.
/// </summary>
/// <param name="occPopByZone">The population for this occupation by zone.</param>
/// <param name="workerSplits">Per-category splits, laid out the same way as the result.</param>
private float[] CreateWorkersByCategory(SparseArray<float> occPopByZone, float[] workerSplits)
{
    if (KeepLocalData && LocalWorkerCategories != null)
    {
        // reuse the cached result from a previous call
        return LocalWorkerCategories;
    }
    var population = occPopByZone.GetFlatData();
    var workers = new float[NumberOfWorkerCategories * population.Length];
    for (int category = 0; category < NumberOfWorkerCategories; category++)
    {
        int offset = category * population.Length;
        if (VectorHelper.IsHardwareAccelerated)
        {
            VectorHelper.Multiply(workers, offset, population, 0, workerSplits, offset, population.Length);
        }
        else
        {
            for (int zone = 0; zone < population.Length; zone++)
            {
                workers[zone + offset] = population[zone] * workerSplits[zone + offset];
            }
        }
    }
    if (KeepLocalData)
    {
        LocalWorkerCategories = workers;
    }
    return workers;
}
/// <summary>
/// Computes production and attraction, then scales the attraction vector so its total
/// matches total production. When total attraction is zero, the production total is
/// instead spread evenly across the indexes.
/// </summary>
override public void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    var flatProduction = production.GetFlatData();
    var flatAttraction = attractions.GetFlatData();
    var numberOfIndexes = flatAttraction.Length;
    // Compute the Production and Attractions
    var totalProduction = ComputeProduction(flatProduction, numberOfIndexes);
    var totalAttraction = ComputeAttraction(flatAttraction, Root.ZoneSystem.ZoneArray.GetFlatData(), numberOfIndexes);
    // factor that balances the attractions to the production total
    var scale = totalAttraction != 0
        ? totalProduction / totalAttraction
        : totalProduction / numberOfIndexes;
    for (int i = 0; i < numberOfIndexes; i++)
    {
        flatAttraction[i] = flatAttraction[i] * scale;
    }
}
/// <summary>
/// Computes production and attraction vectors from the daily and time-of-day rate
/// tables. Attractions are deliberately not normalized here.
/// </summary>
override public void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    if (LoadData)
    {
        // lazily acquire any rate table that has not already been supplied
        if (DailyRates == null)
        {
            LoadDailyRates.LoadData();
            DailyRates = LoadDailyRates.GiveData();
        }
        if (TimeOfDayRates == null)
        {
            LoadTimeOfDayRates.LoadData();
            TimeOfDayRates = LoadTimeOfDayRates.GiveData();
        }
    }
    var flatAttraction = attractions.GetFlatData();
    // compute both vectors in one pass; we do not normalize the attraction
    ComputeProduction(production.GetFlatData(), flatAttraction, flatAttraction.Length);
    if (LoadData)
    {
        // release the rate tables so later iterations reload fresh data
        LoadDailyRates.UnloadData();
        LoadTimeOfDayRates.UnloadData();
        DailyRates = null;
        TimeOfDayRates = null;
    }
}
/// <summary>
/// Generates student trip productions for this age category by combining zonal population,
/// age rates, school rates, and planning-district daily/time-of-day rates. External zones
/// (which have no school rates) produce zero. The production vector is then saved to disk.
/// </summary>
public void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    var ageRates = this.Root.Demographics.AgeRates;
    var schoolRates = this.Root.Demographics.SchoolRates.GetFlatData();
    var zones = this.Root.ZoneSystem.ZoneArray.GetFlatData();
    var flatProduction = production.GetFlatData();
    for (int zoneIndex = 0; zoneIndex < zones.Length; zoneIndex++)
    {
        // the school rates are only null for external zones
        var zoneSchoolRates = schoolRates[zoneIndex];
        if (zoneSchoolRates == null)
        {
            // externals generate nothing
            flatProduction[zoneIndex] = 0f;
        }
        else
        {
            var zone = zones[zoneIndex];
            var pd = zone.PlanningDistrict;
            flatProduction[zoneIndex] = zone.Population
                * ageRates[zone.ZoneNumber, this.Age]
                * zoneSchoolRates[this.Age, 0]
                * StudentDailyRates[pd, 0, this.Age]
                * StudentTimeOfDayRates[pd, 0, this.Age];
        }
    }
    SaveProductionToDisk(zones, flatProduction);
}
/// <summary>
/// Load the mode-split truth probabilities from file
/// (columns: zone, auto probability, transit probability, total trips).
/// </summary>
/// <param name="zoneSystem">The zone system the model is using</param>
private void LoadProbabilities(SparseArray<IZone> zoneSystem)
{
    var zones = zoneSystem.GetFlatData();
    AutoProbabilities = new float[zones.Length];
    TransitProbabilities = new float[zones.Length];
    TotalTrips = new float[zones.Length];
    using (CsvReader reader = new CsvReader(ModeSplitTruthData))
    {
        // burn header
        reader.LoadLine();
        // read in the rest of the data
        while (reader.LoadLine(out int columns))
        {
            // BUGFIX: we read four columns (indexes 0..3) so at least 4 are required;
            // the old '>= 3' check permitted reading past a 3-column row.
            if (columns >= 4)
            {
                reader.Get(out int zone, 0);
                zone = zoneSystem.GetFlatIndex(zone);
                // skip records for zones that are not part of the zone system
                if (zone >= 0)
                {
                    reader.Get(out float auto, 1);
                    reader.Get(out float transit, 2);
                    reader.Get(out float totalTrips, 3);
                    AutoProbabilities[zone] = auto;
                    TransitProbabilities[zone] = transit;
                    TotalTrips[zone] = totalTrips;
                }
            }
        }
    }
}
/// <summary>
/// Prepares the population pools for the iteration: computes base-year and forecast
/// population densities per zone (population / approximated zone area) and hands them,
/// along with region numbers, to each pool.
/// </summary>
public void IterationStarting(int iteration)
{
    Households = new List<ITashaHousehold>();
    ZoneSystem = Root.ZoneSystem.ZoneArray;
    var zones = ZoneSystem.GetFlatData();
    // copy the base-year population before unloading the data source
    BaseYearPopulation.LoadData();
    var baseDensity = (float[])BaseYearPopulation.GiveData().GetFlatData().Clone();
    BaseYearPopulation.UnloadData();
    var area = zones.Select(z =>
    {
        // A = (6InternalDistance)^2
        // since the units are meters we can divide by 1000 to get to pop/km
        // 0.006 is 6/1000
        var dist = (z.InternalDistance * 0.006f);
        return dist * dist;
    }).ToArray();
    var forecastDensity = zones.Select(z => (float)z.Population).ToArray();
    // convert both population vectors into densities in place
    VectorHelper.Divide(baseDensity, 0, baseDensity, 0, area, 0, baseDensity.Length);
    var regions = zones.Select(z => z.RegionNumber).ToArray();
    VectorHelper.Divide(forecastDensity, 0, forecastDensity, 0, area, 0, baseDensity.Length);
    foreach (var pool in PopulationPools)
    {
        pool.Initialize(regions, baseDensity, forecastDensity);
    }
}
/// <summary>
/// Loads the observed distribution probabilities and total trips by zone from the
/// ObservedDistributionFile CSV (columns: zone, probability, total trips).
/// </summary>
/// <param name="zoneSystem">The zone system the model is using.</param>
private void LoadProbabilities(SparseArray<IZone> zoneSystem)
{
    var zones = zoneSystem.GetFlatData();
    ObservedDistribution = new float[zones.Length];
    TotalTrips = new float[zones.Length];
    using (CsvReader reader = new CsvReader(ObservedDistributionFile))
    {
        // burn header
        reader.LoadLine();
        // read in the rest of the data
        while (reader.LoadLine(out int columns))
        {
            // BUGFIX: we read three columns (indexes 0..2) so at least 3 are required;
            // the old '>= 2' check permitted reading past a 2-column row.
            if (columns >= 3)
            {
                reader.Get(out int zone, 0);
                zone = zoneSystem.GetFlatIndex(zone);
                // skip records for zones that are not part of the zone system
                if (zone >= 0)
                {
                    reader.Get(out float probability, 1);
                    reader.Get(out float totalTrips, 2);
                    ObservedDistribution[zone] = probability;
                    TotalTrips[zone] = totalTrips;
                }
            }
        }
    }
}
/// <summary>
/// Builds an EMME binary matrix (version 1, float type, 2 dimensions) from a zone system
/// and a square jagged matrix, flattening the data row by row.
/// </summary>
/// <param name="zoneSystem">Provides the zone numbers used for both index dimensions.</param>
/// <param name="data">The square matrix to flatten, one inner array per origin.</param>
public EmmeMatrix(SparseArray<IZone> zoneSystem, float[][] data)
{
    var zones = zoneSystem.GetFlatData();
    MagicNumber = EmmeMagicNumber;
    Version = 1;
    Type = DataType.Float;
    Dimensions = 2;
    // both dimensions are indexed by the zone numbers
    Indexes = new int[2][];
    for (int dimension = 0; dimension < Indexes.Length; dimension++)
    {
        var index = Indexes[dimension] = new int[zones.Length];
        for (int zone = 0; zone < index.Length; zone++)
        {
            index[zone] = zones[zone].ZoneNumber;
        }
    }
    // flatten the jagged rows into one contiguous buffer
    var flattened = new float[zones.Length * zones.Length];
    for (int row = 0; row < data.Length; row++)
    {
        Array.Copy(data[row], 0, flattened, row * zones.Length, zones.Length);
    }
    FloatData = flattened;
    DoubleData = null;
    SignedIntData = null;
    UnsignedIntData = null;
}
/// <summary>
/// Stores the computed production vector into the shared worker-data resource at the
/// (employment, mobility, age) category this module is configured for.
/// </summary>
/// <param name="production">The production by zone to store.</param>
/// <exception cref="XTMFRuntimeException">
/// Thrown when the category indexes are invalid or storing a zone's value fails.
/// </exception>
private void StoreProductionData(SparseArray<float> production)
{
    var age = AgeCategoryRange[0].Start;
    var mob = Mobility[0].Start;
    var emp = EmploymentStatusCategory[0].Start;
    var data = WorkerData.AcquireResource<SparseArray<SparseTriIndex<float>>>();
    var flatData = data.GetFlatData();
    var flatProduction = production.GetFlatData();
    // translate the sparse category indexes into flat indexes (validated against the first zone)
    if (!flatData[0].GetFlatIndex(ref emp, ref mob, ref age))
    {
        throw new XTMFRuntimeException("In " + Name + " we were unable to find a place to store our data ("
            + emp + "," + mob + "," + age + ")");
    }
    int i = 0;
    try
    {
        for (; i < flatProduction.Length; i++)
        {
            flatData[i].GetFlatData()[emp][mob][age] = flatProduction[i];
        }
    }
    catch (Exception e)
    {
        // BUGFIX: replaced the unhelpful "Failed Yo!" message (which also discarded the
        // underlying exception) with one identifying the module, zone index, and cause.
        throw new XTMFRuntimeException("In " + Name + " we failed to store the production data for flat zone index "
            + i + ". " + e.Message);
    }
}
/// <summary>
/// Checks every pair of age-category ranges for overlap.
/// </summary>
/// <param name="error">Set to a description of the first overlap found.</param>
/// <param name="sparseArray">The ranges to validate.</param>
/// <returns>True when an overlap exists, false otherwise.</returns>
private bool CheckForOverlap(ref string error, SparseArray<Range> sparseArray)
{
    var flatData = sparseArray.GetFlatData();
    for (int i = 0; i < flatData.Length; i++)
    {
        for (int j = i + 1; j < flatData.Length; j++)
        {
            // the earlier-starting range overlaps the other when it stops at or after the other's start
            bool overlaps = flatData[i].Start < flatData[j].Start
                ? flatData[i].Stop >= flatData[j].Start
                : flatData[j].Stop >= flatData[i].Start;
            if (overlaps)
            {
                // BUGFIX: the message previously never closed the final quote; the duplicated
                // message construction in both branches is also collapsed into one.
                error = "In '" + Name + "' there is an overlap in age category '"
                    + sparseArray.GetSparseIndex(i) + "' and '" + sparseArray.GetSparseIndex(j) + "'";
                return true;
            }
        }
    }
    return false;
}
/// <summary>
/// Constructs an EMME binary matrix header and payload (version 1, float, 2 dimensions)
/// from the zone system and a square origin-by-destination matrix.
/// </summary>
/// <param name="zoneSystem">Provides the zone numbers for the row and column indexes.</param>
/// <param name="data">The matrix to flatten, one inner array per origin.</param>
public EmmeMatrix(SparseArray<IZone> zoneSystem, float[][] data)
{
    var flatZones = zoneSystem.GetFlatData();
    var numberOfZones = flatZones.Length;
    MagicNumber = EmmeMagicNumber;
    Version = 1;
    Type = DataType.Float;
    Dimensions = 2;
    // rows and columns share the same zone-number index
    Indexes = new int[2][];
    for (int d = 0; d < Indexes.Length; d++)
    {
        var zoneNumbers = new int[numberOfZones];
        for (int z = 0; z < zoneNumbers.Length; z++)
        {
            zoneNumbers[z] = flatZones[z].ZoneNumber;
        }
        Indexes[d] = zoneNumbers;
    }
    // copy each origin's row into the single flat payload buffer
    var payload = new float[numberOfZones * numberOfZones];
    for (int origin = 0; origin < data.Length; origin++)
    {
        Array.Copy(data[origin], 0, payload, origin * numberOfZones, numberOfZones);
    }
    FloatData = payload;
    DoubleData = null;
    SignedIntData = null;
    UnsignedIntData = null;
}
/// <summary>
/// Builds a singly-constrained flow matrix: the frictions are exponentiated in place and
/// each origin's production is split across destinations in proportion to its friction
/// row. Rows whose friction sum is not positive are left at zero.
/// </summary>
/// <param name="production">The production by zone.</param>
/// <param name="friction">The flat friction matrix (zones * zones); modified in place.</param>
/// <returns>The resulting origin-destination flow matrix.</returns>
public static SparseTwinIndex<float> Process(SparseArray<float> production, float[] friction)
{
    var ret = production.CreateSquareTwinArray<float>();
    var flatRet = ret.GetFlatData();
    var flatProduction = production.GetFlatData();
    var numberOfZones = flatProduction.Length;
    var options = new ParallelOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount };
    try
    {
        // Make all of the frictions to the power of E
        Parallel.For(0, friction.Length, options, (int i) =>
        {
            friction[i] = (float)Math.Exp(friction[i]);
        });
        Parallel.For(0, numberOfZones, options, (int origin) =>
        {
            var rowStart = origin * numberOfZones;
            // gather the sum of the friction for this origin
            float rowSum = 0f;
            for (int j = 0; j < numberOfZones; j++)
            {
                rowSum += friction[rowStart + j];
            }
            if (rowSum <= 0)
            {
                // no accessible destinations; leave the row at zero
                return;
            }
            rowSum = 1f / rowSum;
            for (int j = 0; j < numberOfZones; j++)
            {
                flatRet[origin][j] = flatProduction[origin] * (friction[rowStart + j] * rowSum);
            }
        });
    }
    catch (AggregateException e)
    {
        // unwrap the message so XTMF reports the real cause
        if (e.InnerException is XTMFRuntimeException)
        {
            throw new XTMFRuntimeException(e.InnerException.Message);
        }
        else
        {
            throw new XTMFRuntimeException(e.InnerException.Message + "\r\n" + e.InnerException.StackTrace);
        }
    }
    return ret;
}
/// <summary>
/// Generates zonal productions from a per-region linear model:
/// production = population * b_pop + employment * b_emp + constant.
/// </summary>
/// <param name="production">Receives the computed production per zone.</param>
/// <param name="attractions">Unused; attractions are handled elsewhere.</param>
public void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    var flatZones = this.Root.ZoneSystem.ZoneArray.GetFlatData();
    var flatProduction = production.GetFlatData();
    for (int zone = 0; zone < flatZones.Length; zone++)
    {
        int regionIndex;
        if (this.InverseLookup(flatZones[zone].RegionNumber, out regionIndex))
        {
            flatProduction[zone] =
                flatZones[zone].Population * this.RegionPopulationParameter[regionIndex]
                + flatZones[zone].Employment * this.RegionEmploymentParameter[regionIndex]
                + this.RegionConstantsParameter[regionIndex];
        }
        else
        {
            // Regions not included in the lookup generate nothing.
            flatProduction[zone] = 0;
        }
    }
}
/// <summary>
/// Generates HBO productions for the current demographic category. No attraction
/// component is produced; the distribution handles that case.
/// </summary>
/// <param name="production">Receives the computed production per zone.</param>
/// <param name="attractions">Unused for HBO generation.</param>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    // Lazily load the generation rates the first time we are invoked.
    if (LoadData && Rates == null)
    {
        this.LoadRates.LoadData();
        this.Rates = this.LoadRates.GiveData();
    }
    this.InitializeDemographicCategory();
    var flatProduction = production.GetFlatData();
    var numberOfIndexes = flatProduction.Length;
    // Compute the Production (the redundant zero-initialization of the total was removed).
    float totalProduction = ComputeProduction(flatProduction, numberOfIndexes);
    SaveGenerationData(totalProduction);
    //The HBO Model does NOT include having an attraction component. The distribution will handle this case.
    if (LoadData)
    {
        // Release the rates so the next invocation reloads fresh data.
        this.Rates = null;
    }
}
/// <summary>
/// Computes, per zone, the share of valid workers contributed by this category's single
/// age/occupation combination relative to all configured age sets (full-time, status 1).
/// </summary>
/// <param name="production">Receives the per-zone age-weight share.</param>
/// <param name="attractions">Unused here.</param>
public override void Generate(SparseArray<float> production, SparseArray<float> attractions)
{
    // Do nothing, the distribution needs to do it all anyways
    // The only thing this generation needs is the ability to setup the mode choice properly
    var flatProduction = production.GetFlatData();
    var ageRates = this.Root.Demographics.AgeRates;
    var empRates = this.Root.Demographics.EmploymentStatusRates.GetFlatData();
    var occRates = this.Root.Demographics.OccupationRates.GetFlatData();
    var zones = this.Root.ZoneSystem.ZoneArray.GetFlatData();
    // This category is configured with single ranges; use their first entries.
    var age = this.AgeCategoryRange[0].Start;
    var occ = this.OccupationCategory[0].Start;
    Parallel.For( 0, flatProduction.Length, (int i) =>
    {
        float total = 0;
        var zoneNumber = zones[i].ZoneNumber;
        var emp = empRates[i];
        var occRate = occRates[i];
        // non-short-circuit '|' — presumably deliberate; both operands are cheap null checks
        if ( emp == null | occRate == null )
        {
            flatProduction[i] = 0;
        }
        else
        {
            // Accumulate the total worker weight across every configured age set.
            foreach ( var set in this.AllAges )
            {
                for ( int a = set.Start; a <= set.Stop; a++ )
                {
                    total += ageRates[zoneNumber, a] * emp[a, 1] * occRate[a, 1, occ];
                }
            }
            // NOTE, SINCE THE DISTRIBUTION DOES TRIP RATES BASED ON PDO->PDD WE ONLY NEED TO
            // GIVE THE TOTAL WORKERS FOR THIS GIVEN AGE
            // the rate is not just the age, but the weight of the age for valid workers
            flatProduction[i] = total > 0 ? ( ageRates[zoneNumber, age] * emp[age, 1] * occRate[age, 1, occ] ) / total : 0;
        }
    } );
}
/// <summary>
/// (Re)loads the interleaved zone-by-zone travel time and cost data, reusing the existing
/// buffer when the zone system's size has not changed.
/// </summary>
/// <param name="zoneArray">The zone system to load data for.</param>
internal void LoadData(SparseArray<IZone> zoneArray)
{
    var flatZones = zoneArray.GetFlatData();
    this.NumberOfZones = flatZones.Length;
    var requiredSize = flatZones.Length * flatZones.Length * (int)DataTypes.NumberOfDataTypes;
    // Reuse the previous buffer only if it is exactly the right size.
    float[] data;
    if (Data != null && Data.Length == requiredSize)
    {
        data = Data;
    }
    else
    {
        data = new float[requiredSize];
    }
    // Load each data type into its interleaved slot.
    LoadData(data, this.TravelTimeReader, (int)DataTypes.TravelTime, zoneArray, TimesLoaded);
    LoadData(data, this.CostReader, (int)DataTypes.Cost, zoneArray, TimesLoaded);
    TimesLoaded++;
    // now store it
    this.Data = data;
}
/// <summary>
/// Resets every entry of the array's flat backing storage to zero.
/// </summary>
/// <param name="pdArray">The planning-district array to clear.</param>
private void Clear(SparseArray<float> pdArray)
{
    var flat = pdArray.GetFlatData();
    Array.Clear(flat, 0, flat.Length);
}
/// <summary>
/// Prepares the zone system, allocates the per-time-period To/From buffers, pre-computes
/// the exponentiated constants, builds the PD cube, and finally calculates utilities.
/// </summary>
internal void Load()
{
    var timePeriods = Parent.TimePeriods;
    zoneSystem = Root.ZoneSystem.ZoneArray;
    zones = zoneSystem.GetFlatData();
    if(To == null)
    {
        // One flattened zone-by-zone matrix per time period, for each direction.
        To = new float[timePeriods.Length][];
        From = new float[timePeriods.Length][];
        for(int i = 0; i < timePeriods.Length; i++)
        {
            To[i] = new float[zones.Length * zones.Length];
            From[i] = new float[zones.Length * zones.Length];
        }
    }
    expSamePD = (float)Math.Exp(SamePD);
    // raise the constants to e^constant to save CPU time during the main phase
    for(int i = 0; i < ODConstants.Length; i++)
    {
        ODConstants[i].ExpConstant = (float)Math.Exp(ODConstants[i].Constant);
    }
    var pds = TMG.Functions.ZoneSystemHelper.CreatePDArray<float>(Root.ZoneSystem.ZoneArray);
    BuildPDCube(pds);
    if(FlatZoneToPDCubeLookup == null)
    {
        // Map each flat zone index to the flat index of its planning district.
        FlatZoneToPDCubeLookup = zones.Select(zone => pds.GetFlatIndex(zone.PlanningDistrict)).ToArray();
    }
    // now that we are done we can calculate our utilities
    CalculateUtilities();
}
/// <summary>
/// Loads the capacity factors from the configured source. If loading fails for any reason,
/// the source is unloaded and every zone falls back to a neutral factor of 1.
/// </summary>
private void LoadCapacityFactors()
{
    try
    {
        CapacityFactorSource.LoadData();
        CapacityFactor = CapacityFactorSource.GiveData();
        CapacityFactorSource.UnloadData();
    }
    catch
    {
        // if we were unable to load it properly make sure that it is unloaded
        CapacityFactorSource.UnloadData();
        CapacityFactor = Root.ZoneSystem.ZoneArray.CreateSimilarArray<float>();
        var factors = CapacityFactor.GetFlatData();
        for (int zone = 0; zone < factors.Length; zone++)
        {
            factors[zone] = 1.0f;
        }
    }
}
/// <summary>
/// Writes the vector to a CSV file as "Zone,Value" rows, one per entry,
/// using the array's sparse indexes as the zone numbers.
/// </summary>
/// <param name="data">The vector to save.</param>
/// <param name="saveTo">The destination file path.</param>
public static void SaveVector(SparseArray<float> data, string saveTo)
{
    var values = data.GetFlatData();
    var zoneNumbers = data.ValidIndexArray();
    using (var writer = new StreamWriter(saveTo))
    {
        writer.WriteLine("Zone,Value");
        for (int i = 0; i < values.Length; i++)
        {
            writer.Write(zoneNumbers[i]);
            writer.Write(',');
            writer.WriteLine(values[i]);
        }
    }
}
/// <summary>
/// Aggregates zonal population-by-age data up to the planning-district level,
/// one SparseArray per age category.
/// </summary>
/// <param name="populationByAge">Per-age-category arrays of zonal population (flat zone order).</param>
/// <param name="zones">The zone system used to map zones to planning districts.</param>
/// <param name="pdData">Receives the per-age-category planning-district totals.</param>
private static void BuildPlanningDistrictData(float[][] populationByAge, SparseArray<IZone> zones, SparseArray<float>[] pdData)
{
    pdData[0] = TMG.Functions.ZoneSystemHelper.CreatePDArray<float>( zones );
    for ( int i = 1; i < pdData.Length; i++ )
    {
        // The remaining categories share the PD index structure of the first array.
        pdData[i] = pdData[0].CreateSimilarArray<float>();
    }
    var flatZones = zones.GetFlatData();
    for ( int i = 0; i < populationByAge.Length; i++ )
    {
        // Accumulate each zone's value into its planning district's bucket.
        // (NOTE(review): the original comment said "clear out the data", but this loop sums;
        // the freshly created arrays above start at zero.)
        var array = populationByAge[i];
        var pdArray = pdData[i];
        for ( int j = 0; j < array.Length; j++ )
        {
            pdArray[flatZones[j].PlanningDistrict] += array[j];
        }
    }
}
/// <summary>
/// Copies the balanced flows into the result matrix, scaled so the distributed total
/// matches the total production. A degenerate scale (NaN/Infinity, e.g. all-zero flows)
/// zeroes the matrix instead.
/// </summary>
/// <param name="ret">The result matrix to fill.</param>
/// <param name="O">Productions; their sum defines the scaling target.</param>
/// <param name="D">Attractions (unused here, kept for interface symmetry).</param>
/// <param name="oLength">The number of zones per row.</param>
/// <param name="flows">The flattened balanced flow matrix.</param>
private static void BuildDistribution(SparseTwinIndex<float> ret, SparseArray<float> O, SparseArray<float> D, int oLength, float[] flows)
{
    var retFlat = ret.GetFlatData();
    var ratio = O.GetFlatData().Sum() / flows.Sum();
    if (float.IsNaN(ratio) | float.IsInfinity(ratio))
    {
        // The original re-tested the same (loop-invariant) ratio per cell, which was always
        // true inside this branch, so every cell was written as zero. Do that directly.
        Parallel.For(0, retFlat.Length, delegate (int i)
        {
            Array.Clear(retFlat[i], 0, oLength);
        });
        return;
    }
    Parallel.For(0, retFlat.Length, delegate (int i)
    {
        var iOffset = i * oLength;
        var ith = retFlat[i];
        for (int j = 0; j < oLength; j++)
        {
            ith[j] = flows[iOffset + j] * ratio;
        }
    });
}
/// <summary>
/// GPU-based doubly-constrained gravity model: balances flows between productions O and
/// attractions D with the supplied friction matrix, reusing the shader and buffers held
/// in fields, and logs coarse performance numbers to GPUPerf.txt.
/// </summary>
/// <param name="Friction">Flattened zone-by-zone friction matrix.</param>
/// <param name="O">Productions per zone.</param>
/// <param name="D">Attractions per zone.</param>
/// <returns>The balanced flow matrix.</returns>
public SparseTwinIndex<float> ProcessFlow(float[] Friction, SparseArray<float> O, SparseArray<float> D)
{
    float[] o = O.GetFlatData();
    float[] d = D.GetFlatData();
    var oLength = o.Length;
    var dLength = d.Length;
    var squareSize = oLength * dLength;
    Stopwatch watch = new Stopwatch();
    watch.Start();
    // NOTE(review): 'length' is not declared in this method — presumably a field; confirm it
    // matches oLength, since the X-thread count should cover each origin zone.
    gravityModelShader.NumberOfXThreads = length;
    gravityModelShader.NumberOfYThreads = 1;
    gravityModelShader.ThreadGroupSizeX = 64;
    gravityModelShader.ThreadGroupSizeY = 1;
    // balanced[0] is the convergence flag read back from the GPU; balanced[1] the epsilon target.
    float[] balanced = new float[] { 0, this.Epsilon };
    int iterations = 0;
    var step1 = new int[] { oLength, 0, this.MaxIterations };
    var step2 = new int[] { oLength, 1, this.MaxIterations };
    // Reuse the flows buffer across calls when the zone system's size is unchanged.
    if ( flows == null || flows.Length != o.Length * d.Length )
    {
        flows = new float[squareSize];
    }
    SparseTwinIndex<float> ret = null;
    // Build the result structure concurrently while the GPU is busy balancing.
    Task createReturn = new Task( delegate() { ret = O.CreateSquareTwinArray<float>(); } );
    createReturn.Start();
    FillAndLoadBuffers( o, d, Friction, balanced );
    iterations = Balance( gpu, gravityModelShader, balancedBuffer, parameters, balanced, iterations, step1, step2 );
    gpu.Read( flowsBuffer, flows );
    gravityModelShader.RemoveAllBuffers();
    createReturn.Wait();
    BuildDistribution( ret, O, D, oLength, flows );
    watch.Stop();
    // Append coarse performance numbers for offline inspection.
    using ( StreamWriter writer = new StreamWriter( "GPUPerf.txt", true ) )
    {
        writer.Write( "Iterations:" );
        writer.WriteLine( iterations );
        writer.Write( "Time(ms):" );
        writer.WriteLine( watch.ElapsedMilliseconds );
    }
    return ret;
}
/// <summary>
/// Checks whether any two age-category ranges in the sparse array overlap.
/// </summary>
/// <param name="error">Set to a description of the first overlapping pair found.</param>
/// <param name="sparseArray">The age-category ranges to validate.</param>
/// <returns>True if an overlap was found, false otherwise.</returns>
private bool CheckForOverlap(ref string error, SparseArray<Range> sparseArray)
{
    var flatData = sparseArray.GetFlatData();
    for ( int i = 0; i < flatData.Length; i++ )
    {
        for ( int j = i + 1; j < flatData.Length; j++ )
        {
            // Order the pair by start so one containment test covers both cases.
            bool overlap = flatData[i].Start < flatData[j].Start
                ? flatData[i].Stop >= flatData[j].Start
                : flatData[j].Stop >= flatData[i].Start;
            if ( overlap )
            {
                // The original message was missing the closing quote after the second category.
                error = "In '" + this.Name + "' there is an overlap in age category '"
                    + sparseArray.GetSparseIndex( i ) + "' and '" + sparseArray.GetSparseIndex( j ) + "'";
                return true;
            }
        }
    }
    return false;
}
/// <summary>
/// Stores the per-zone production totals into the shared worker-data resource at the slot
/// for this category's (employment, mobility, age) combination.
/// </summary>
/// <param name="production">The production vector to store.</param>
/// <exception cref="XTMFRuntimeException">
/// If the (emp, mob, age) slot does not exist, or a zone's data could not be written.
/// </exception>
private void StoreProductionData(SparseArray<float> production)
{
    // This category is configured with single-entry ranges; take their first values.
    var age = AgeCategoryRange[0].Start;
    var mob = Mobility[0].Start;
    var emp = EmploymentStatusCategory[0].Start;
    var data = WorkerData.AquireResource<SparseArray<SparseTriIndex<float>>>();
    var flatData = data.GetFlatData();
    var test = flatData[0];
    var flatProduction = production.GetFlatData();
    // GetFlatIndex converts the sparse (emp, mob, age) coordinates to flat ones in place.
    if(!test.GetFlatIndex(ref emp, ref mob, ref age))
    {
        throw new XTMFRuntimeException("In " + Name + " we were unable to find a place to store our data ("
            + emp + "," + mob + "," + age + ")");
    }
    int i = 0;
    try
    {
        for(; i < flatProduction.Length; i++)
        {
            flatData[i].GetFlatData()[emp][mob][age] = flatProduction[i];
        }
    }
    catch (Exception e)
    {
        // Replaced the original placeholder message ("Failed Yo!") with one that
        // identifies the failing zone index and the underlying cause.
        throw new XTMFRuntimeException("In " + Name + " we failed to store production data for flat zone index "
            + i + " at (" + emp + "," + mob + "," + age + "): " + e.Message);
    }
}
/// <summary>
/// Loads the interleaved travel time and cost data for the current zone system,
/// optionally blending travel times with the previously stored values.
/// </summary>
public void LoadData()
{
    // setup our zones
    ZoneArray = Root.ZoneSystem.ZoneArray;
    Zones = ZoneArray.GetFlatData();
    if (Data == null || Regenerate)
    {
        // Blending is only meaningful when there is previous data to blend against.
        var hadPreviousData = Data != null;
        // Reuse the existing buffer when we have one; otherwise allocate the full matrix.
        var data = Data ?? new float[Zones.Length * Zones.Length * (int)DataTypes.NumberOfDataTypes];
        // now we need to load in each type
        LoadData(data, TravelTimeReader, (int)DataTypes.TravelTime, hadPreviousData & ApplyTimeBlending);
        LoadData(data, CostReader, (int)DataTypes.Cost, false);
        // now store it
        Data = data;
    }
}
/// <summary>
/// Save the data from the given split data to the given file as CSV.
/// EmpStat,
/// </summary>
/// <param name="writer">The stream to write to.</param>
/// <param name="splitData">The data to use</param>
/// <param name="empCode">The empStat code to dump</param>
private void WriteData(StreamWriter writer, SparseArray<float[]> splitData, char empCode)
{
    var data = splitData.GetFlatData();
    for ( int i = 0; i < data.Length; i++ )
    {
        // i is a FLAT index: read the row from the flat data directly and report the
        // corresponding sparse index in the output. The original indexed splitData[i]
        // (sparse indexer) and called GetFlatIndex(i) with a flat index, which produces
        // the wrong rows/labels whenever the sparse and flat indexes differ.
        var row = data[i];
        if ( row != null )
        {
            // buffer as much of the header ahead of time to help performance
            var pdStr = string.Concat( empCode, ",", splitData.GetSparseIndex( i ), "," );
            for ( int j = 0; j < row.Length; j++ )
            {
                writer.Write( pdStr );
                writer.Write( j + 1 );
                writer.Write( ',' );
                writer.WriteLine( row[j] );
            }
        }
    }
}
/// <summary>
/// Loads OD data into the interleaved buffer. On the first load values are stored
/// directly; on subsequent loads the new values are blended with the stored ones.
/// </summary>
/// <param name="data">The interleaved zone-by-zone-by-datatype buffer to fill.</param>
/// <param name="readODData">The OD data source; null means nothing to load.</param>
/// <param name="dataTypeOffset">Which interleaved slot this data type occupies.</param>
/// <param name="zoneArray">Zone system used to map sparse zone numbers to flat indexes.</param>
/// <param name="timesLoaded">How many times this buffer has been loaded before.</param>
private void LoadData(float[] data, IReadODData<float> readODData, int dataTypeOffset, SparseArray<IZone> zoneArray, int timesLoaded)
{
    if(readODData == null)
    {
        return;
    }
    var zones = zoneArray.GetFlatData();
    var numberOfZones = zones.Length;
    var dataTypes = (int)DataTypes.NumberOfDataTypes;
    // Cache the last origin lookup: readers typically emit points grouped by origin.
    int previousPointO = -1;
    int previousFlatO = -1;
    if(timesLoaded == 0)
    {
        // First load: store the values directly.
        foreach(var point in readODData.Read())
        {
            var o = point.O == previousPointO ? previousFlatO : zoneArray.GetFlatIndex(point.O);
            var d = zoneArray.GetFlatIndex(point.D);
            // non-short-circuit '&' — presumably deliberate; both comparisons are cheap
            if(o >= 0 & d >= 0)
            {
                previousPointO = point.O;
                previousFlatO = o;
                // (origin, destination) cell, interleaved by data type
                var index = (o * numberOfZones + d) * dataTypes + dataTypeOffset;
                data[index] = point.Data;
            }
        }
    }
    else
    {
        // Subsequent loads: blend the stored value with the new one.
        // NOTE(review): with iteration = timesLoaded + 1 the NEW data receives weight
        // iteration/(iteration+1) and the stored value 1/(iteration+1); verify this is the
        // intended weighting — a running average would normally weight the new sample lower.
        var iteration = timesLoaded + 1;
        var previousFraction = 1.0f / (iteration + 1.0f);
        var currentFraction = iteration / (1.0f + iteration);
        foreach(var point in readODData.Read())
        {
            var o = point.O == previousPointO ? previousFlatO : zoneArray.GetFlatIndex(point.O);
            var d = zoneArray.GetFlatIndex(point.D);
            if(o >= 0 & d >= 0)
            {
                previousPointO = point.O;
                previousFlatO = o;
                var index = (o * numberOfZones + d) * dataTypes + dataTypeOffset;
                data[index] = data[index] * previousFraction + point.Data * currentFraction;
            }
        }
    }
}
/// <summary>
/// Resets the per-planning-district household accumulators at the start of an iteration.
/// </summary>
/// <param name="iteration">The iteration number (unused).</param>
public void IterationStarting(int iteration)
{
    // initialize data structures
    HouseholdsByPD = ZoneSystemHelper.CreatePDArray<PDData>(Root.ZoneSystem.ZoneArray);
    var flatPDs = HouseholdsByPD.GetFlatData();
    for (int index = 0; index < flatPDs.Length; index++)
    {
        // Each planning district gets a fresh accumulator tagged with its sparse PD number.
        flatPDs[index] = new PDData(HouseholdsByPD.GetSparseIndex(index));
    }
}
/// <summary>
/// GPU-based doubly-constrained gravity model. Compiles the GravityModel.hlsl kernel,
/// balances flows between productions O and attractions D using the Friction field,
/// logs coarse performance numbers to GPUPerf.txt, and returns the distribution.
/// </summary>
/// <param name="O">Productions per zone.</param>
/// <param name="D">Attractions per zone.</param>
/// <returns>The balanced flow matrix.</returns>
/// <exception cref="XTMF.XTMFRuntimeException">If the GPU kernel fails to compile.</exception>
public SparseTwinIndex<float> ProcessFlow(SparseArray<float> O, SparseArray<float> D)
{
    float[] o = O.GetFlatData();
    float[] d = D.GetFlatData();
    var oLength = o.Length;
    var dLength = d.Length;
    var squareSize = oLength * dLength;
    float[] flows = new float[squareSize];
    GPU gpu = new GPU();
    string programPath;
    var codeBase = Assembly.GetEntryAssembly().CodeBase;
    try
    {
        programPath = Path.GetFullPath(codeBase);
    }
    catch
    {
        programPath = codeBase.Replace("file:///", String.Empty);
    }
    // Since the modules are always located in the ~/Modules subdirectory for XTMF,
    // we can just go in there to find the script
    ComputeShader gravityModelShader = null;
    Task compile = new Task(delegate ()
    {
        gravityModelShader = gpu.CompileComputeShader(Path.Combine(Path.GetDirectoryName(programPath), "Modules", "GravityModel.hlsl"), "CSMain");
        gravityModelShader.NumberOfXThreads = oLength;
        gravityModelShader.NumberOfYThreads = 1;
        gravityModelShader.ThreadGroupSizeX = 64;
        gravityModelShader.ThreadGroupSizeY = 1;
    });
    compile.Start();
    // Allocate GPU-side buffers while the kernel compiles.
    GPUBuffer flowsBuffer = gpu.CreateBuffer(squareSize, 4, true);
    GPUBuffer attractionStarBuffer = gpu.CreateBuffer(oLength, 4, true);
    GPUBuffer balancedBuffer = gpu.CreateBuffer(2, 4, true);
    GPUBuffer productionBuffer = gpu.CreateBuffer(dLength, 4, false);
    GPUBuffer attractionBuffer = gpu.CreateBuffer(oLength, 4, false);
    GPUBuffer frictionBuffer = gpu.CreateBuffer(squareSize, 4, false);
    GPUBuffer parameters = gpu.CreateConstantBuffer(16);
    // balanced[0] is the convergence flag written by the GPU; balanced[1] the epsilon target.
    float[] balanced = new float[] { 0, this.Epsilon };
    int iterations = 0;
    var step1 = new int[] { oLength, 0, this.MaxIterations };
    var step2 = new int[] { oLength, 1, this.MaxIterations };
    compile.Wait();
    // Fail BEFORE the shader is used: the original only checked for a failed compile after
    // FillAndLoadBuffers had already been handed the (possibly null) shader.
    if (gravityModelShader == null)
    {
        throw new XTMF.XTMFRuntimeException("Unable to compile the GravityModel GPU Kernel!");
    }
    Stopwatch watch = new Stopwatch();
    watch.Start();
    FillAndLoadBuffers(o, d, Friction, gpu, gravityModelShader, flowsBuffer, attractionStarBuffer, balancedBuffer, productionBuffer, attractionBuffer, frictionBuffer, parameters, balanced);
    iterations = Balance(gpu, gravityModelShader, balancedBuffer, parameters, balanced, iterations, step1, step2);
    gpu.Read(flowsBuffer, flows);
    gravityModelShader.RemoveAllBuffers();
    watch.Stop();
    using (StreamWriter writer = new StreamWriter("GPUPerf.txt", true))
    {
        // Fixed the "Iteraions" typo in the perf log label.
        writer.Write("Iterations:");
        writer.WriteLine(iterations);
        writer.Write("Time(ms):");
        writer.WriteLine(watch.ElapsedMilliseconds);
    }
    gravityModelShader.Dispose();
    gpu.Release();
    return BuildDistribution(O, D, oLength, flows);
}
/// <summary>
/// CPU doubly-constrained balancing of a flow matrix between productions O and
/// attractions D, iterating until convergence or MaxIterations is reached.
/// </summary>
/// <param name="O">Productions per zone.</param>
/// <param name="D">Attractions per zone.</param>
/// <param name="validIndexes">The flat indexes participating in the balancing.</param>
/// <param name="attractionStar">Optional pre-allocated attraction multiplier array.</param>
/// <returns>The balanced flow matrix.</returns>
public SparseTwinIndex<float> ProcessFlow(SparseArray<float> O, SparseArray<float> D, int[] validIndexes, SparseArray<float> attractionStar = null)
{
    int length = validIndexes.Length;
    Productions = O;
    Attractions = D;
    if(attractionStar == null)
    {
        AttractionsStar = D.CreateSimilarArray<float>();
    }
    else
    {
        AttractionsStar = attractionStar;
    }
    FlowMatrix = Productions.CreateSquareTwinArray<float>();
    if(Friction == null)
    {
        InitializeFriction(length);
    }
    var flatAttractionStar = AttractionsStar.GetFlatData();
    float[] oldTotal = new float[flatAttractionStar.Length];
    var flatAttractions = Attractions.GetFlatData();
    // Start the attraction multipliers at 1 and remember the target column totals.
    for(int i = 0; i < length; i++)
    {
        flatAttractionStar[i] = 1f;
        oldTotal[i] = flatAttractions[i];
    }
    int iteration = 0;
    float[] columnTotals = new float[length];
    var balanced = false;
    do
    {
        if(ProgressCallback != null)
        {
            // this doesn't go to 100%, but that is alright since when we end, the progress
            // of the calling model should assume we hit 100%
            ProgressCallback(iteration / (float)MaxIterations);
        }
        Array.Clear(columnTotals, 0, columnTotals.Length);
        // Use the SIMD path when the hardware supports it.
        if(Vector.IsHardwareAccelerated)
        {
            VectorProcessFlow(columnTotals, FlowMatrix.GetFlatData());
        }
        else
        {
            ProcessFlow(columnTotals);
        }
        // Balance updates the attraction multipliers and reports convergence.
        balanced = Balance(columnTotals, oldTotal);
    } while((++iteration) < MaxIterations && !balanced);
    if(ProgressCallback != null)
    {
        ProgressCallback(1f);
    }
    return FlowMatrix;
}
/// <summary>
/// Normalize the sparse array across rows.
/// </summary>
/// <param name="employmentData">The data to normalize; null rows are skipped.</param>
private void Normalize(SparseArray<float[]> employmentData)
{
    foreach (var row in employmentData.GetFlatData())
    {
        if (row != null)
        {
            Normalize(row);
        }
    }
}
/// <summary>
/// Load the mode-split truth probabilities from file.
/// Expects rows of: zone, auto probability, transit probability, total trips.
/// </summary>
/// <param name="zoneSystem">The zone system the model is using</param>
private void LoadProbabilities(SparseArray<IZone> zoneSystem)
{
    var zones = zoneSystem.GetFlatData();
    AutoProbabilities = new float[zones.Length];
    TransitProbabilities = new float[zones.Length];
    TotalTrips = new float[zones.Length];
    using (CsvReader reader = new CsvReader(ModeSplitTruthData))
    {
        // burn header
        reader.LoadLine();
        // read in the rest of the data
        int columns;
        while(reader.LoadLine(out columns))
        {
            // We read four columns (indices 0-3), so require at least 4; the original
            // guard of >= 3 would have read column index 3 on a 3-column row.
            if(columns >= 4)
            {
                int zone;
                reader.Get(out zone, 0);
                zone = zoneSystem.GetFlatIndex(zone);
                // Skip zone numbers that are not part of the zone system.
                if(zone >= 0)
                {
                    float auto, transit, totalTrips;
                    reader.Get(out auto, 1);
                    reader.Get(out transit, 2);
                    reader.Get(out totalTrips, 3);
                    AutoProbabilities[zone] = auto;
                    TransitProbabilities[zone] = transit;
                    TotalTrips[zone] = totalTrips;
                }
            }
        }
    }
}
/// <summary>
/// Fills the intra-district travel times of 'districtNumber' by borrowing the observed
/// averages of the first other district that has intra-district trips, scaled by the
/// relative district radius and zone-count ratio.
/// </summary>
/// <param name="districtNumber">The sparse planning district number to fill.</param>
/// <param name="flatZones">The flat zone system.</param>
/// <param name="matrix">The travel time matrix being filled in place.</param>
/// <param name="radius">Per-district radius values.</param>
private void FillRatioIntraZonalTravelTime(int districtNumber, IZone[] flatZones, SparseTwinIndex<float> matrix, SparseArray<float> radius)
{
    var validDistricts = radius.ValidIndexArray();
    var flatRadius = radius.GetFlatData();
    for ( int otherDistrict = 0; otherDistrict < validDistricts.Length; otherDistrict++ )
    {
        var sparseOther = radius.GetSparseIndex( otherDistrict );
        if ( sparseOther == districtNumber ) continue;
        // NOTE(review): AnyTripIntraDistrict is passed the FLAT index while the helper calls
        // below receive the SPARSE index — confirm each helper expects what it is given.
        if ( this.AnyTripIntraDistrict( otherDistrict, flatZones, matrix ) )
        {
            // Scale the donor district's averages by relative radius and zone-count ratio.
            var distanceRatio = radius[districtNumber] / flatRadius[otherDistrict];
            var data = matrix.GetFlatData();
            var averageTT = GetAverageIntraDistrictNonIntraZonalTravelTime( sparseOther, flatZones, data );
            var averageIntraZonealTT = GetAverageIntraZonalTravelTime( sparseOther, flatZones, data );
            var zoneRatio = GetNumberOfZonesRatio( flatZones, districtNumber, sparseOther );
            averageTT *= distanceRatio * zoneRatio;
            averageIntraZonealTT *= distanceRatio * zoneRatio;
            // Write the scaled averages into every zone pair inside the target district.
            for ( int i = 0; i < flatZones.Length; i++ )
            {
                if ( flatZones[i].PlanningDistrict != districtNumber ) continue;
                for ( int j = 0; j < flatZones.Length; j++ )
                {
                    if ( flatZones[j].PlanningDistrict != districtNumber ) continue;
                    if ( i == j )
                    {
                        data[i][j] = averageIntraZonealTT;
                    }
                    else
                    {
                        data[i][j] = averageTT;
                    }
                }
            }
            // Only the first donor district with intra-district trips is used.
            break;
        }
    }
}
/// <summary>
/// Aggregates the zonal mode-split file into [mode][origin region][destination region]
/// expanded-person totals, mapping each zone to its planning district's region bucket.
/// </summary>
/// <param name="modeNames">Mode names used to resolve each record's mode index.</param>
/// <param name="zoneSystem">The zone system, used to look up planning districts.</param>
/// <param name="regions">The region system the planning districts map into.</param>
/// <returns>The aggregated expanded-person totals.</returns>
private float[][][] BuildData(string[] modeNames, SparseArray<IZone> zoneSystem, SparseArray<int> regions)
{
    // (The original also fetched zoneSystem.GetFlatData() into an unused local; removed.)
    var modes = Root.AllModes.ToArray();
    var numberOfRegions = regions.GetFlatData().Length;
    var data = new float[modes.Length][][];
    for(int i = 0; i < data.Length; i++)
    {
        var row = data[i] = new float[numberOfRegions][];
        for(int j = 0; j < row.Length; j++)
        {
            row[j] = new float[numberOfRegions];
        }
    }
    using (CsvReader reader = new CsvReader(ZonalModeSplitFile))
    {
        // burn header
        reader.LoadLine();
        int columns;
        while(reader.LoadLine(out columns))
        {
            // ignore lines without the right number of columns
            if(columns == 4)
            {
                string modeName;
                int originZone, destinationZone;
                float expandedPersons;
                reader.Get(out modeName, 0);
                reader.Get(out originZone, 1);
                reader.Get(out destinationZone, 2);
                reader.Get(out expandedPersons, 3);
                // Accumulate into the (mode, origin region, destination region) bucket.
                data[ModeIndex(modeName, modeNames)][regions.GetFlatIndex(zoneSystem[originZone].PlanningDistrict)][regions.GetFlatIndex(zoneSystem[destinationZone].PlanningDistrict)] += expandedPersons;
            }
        }
    }
    return data;
}
/// <summary>
/// Appends the per-zone attractions for this demographic category to the attraction CSV,
/// writing the header first if the file does not yet exist. Does nothing when no file
/// name has been configured.
/// </summary>
/// <param name="attractions">The attraction vector to write.</param>
private void WriteAttractionFile(SparseArray<float> attractions)
{
    if(!AttractionFileName.ContainsFileName())
    {
        return;
    }
    var flatAttractions = attractions.GetFlatData();
    bool writeHeader = !File.Exists(AttractionFileName.GetFileName());
    // Pre-build the constant category segment of each row once.
    var categoryData = new StringBuilder()
        .Append(',').Append(AgeCategoryRange.ToString())
        .Append(',').Append(EmploymentStatusCategory.ToString())
        .Append(',').Append(OccupationCategory.ToString())
        .Append(',').Append(Mobility.ToString())
        .Append(',').ToString();
    using (var writer = new StreamWriter(AttractionFileName.GetFileName(), true))
    {
        if(writeHeader)
        {
            // if we are the first thing to generate, then write the header as well
            writer.WriteLine("Zone,Age,Employment,Occupation,Mobility,Attraction");
        }
        for(int i = 0; i < flatAttractions.Length; i++)
        {
            writer.Write(attractions.GetSparseIndex(i));
            writer.Write(categoryData);
            writer.WriteLine(flatAttractions[i]);
        }
    }
}