/// <summary>
/// Builds a traditional-model initialisation object from the common configuration
/// and environment, optionally seeded from a previously saved model state.
/// </summary>
/// <param name="m">Saved model state to resume from, or null for a fresh run.</param>
/// <param name="d">Run configuration.</param>
/// <param name="e">Grid environment.</param>
/// <returns>A populated <see cref="MadingleyModelInitialisation"/>.</returns>
public static MadingleyModelInitialisation ConvertInitialisation( Madingley.Common.ModelState m, Madingley.Common.Configuration d, Madingley.Common.Environment e)
{
    var init = new MadingleyModelInitialisation("", "", "", "");

    // Simulation timing.
    init.GlobalModelTimeStepUnit = d.GlobalModelTimeStepUnit;
    init.NumTimeSteps = (uint)d.NumTimeSteps;
    init.BurninTimeSteps = (uint)d.BurninTimeSteps;
    init.ImpactTimeSteps = (uint)d.ImpactTimeSteps;
    init.RecoveryTimeSteps = (uint)d.RecoveryTimeSteps;

    // Grid geometry.
    init.CellSize = e.CellSize;
    init.BottomLatitude = (float)e.BottomLatitude;
    init.TopLatitude = (float)e.TopLatitude;
    init.LeftmostLongitude = (float)e.LeftmostLongitude;
    init.RightmostLongitude = (float)e.RightmostLongitude;

    // Run options.
    init.RunCellsInParallel = d.RunCellsInParallel;
    init.RunSimulationsInParallel = d.RunSimulationsInParallel;
    init.RunRealm = d.RunRealm;
    init.DrawRandomly = d.DrawRandomly;
    init.ExtinctionThreshold = d.ExtinctionThreshold;
    init.MaxNumberOfCohorts = d.MaxNumberOfCohorts;
    init.DispersalOnly = d.DispersalOnly;
    init.PlanktonDispersalThreshold = d.PlanktonDispersalThreshold;
    init.SpecificLocations = e.SpecificLocations;

    // String-valued options; output detail is always "high" in this path.
    init.InitialisationFileStrings = new SortedList<string, string>();
    init.InitialisationFileStrings["OutputDetail"] = "high";
    init.InitialisationFileStrings["DispersalOnlyType"] = d.DispersalOnlyType;

    init.CohortFunctionalGroupDefinitions = ConvertFunctionalGroupDefinitions(d.CohortFunctionalGroupDefinitions);
    init.StockFunctionalGroupDefinitions = ConvertFunctionalGroupDefinitions(d.StockFunctionalGroupDefinitions);

    // Prefer the environment captured in the saved state when resuming.
    init.EnviroStack = m != null
        ? ConvertEnvironment(m.GridCells)
        : ConvertEnvironment(e.CellEnvironment);

    init.CellList = e.FocusCells
        .Select(cell => new UInt32[] { (uint)cell.Item1, (uint)cell.Item2 })
        .ToList();

    // Process tracking is always enabled in this conversion path.
    init.TrackProcesses = true;
    init.TrackCrossCellProcesses = true;
    init.TrackGlobalProcesses = true;

    init.Units = new SortedList<string, string>(e.Units);
    init.ImpactCellIndices = d.ImpactCellIndices.Select(index => (uint)index).ToList();
    init.ImpactAll = d.ImpactAll;

    if (m != null)
    {
        // Resuming: carry over the saved state and its diagnostics.
        init.ModelStates = ConvertModelStates(m, d, e);
        init.InputState = true;
        init.InputGlobalDiagnosticVariables = new SortedList<string, double>(m.GlobalDiagnosticVariables);
    }
    else
    {
        init.ModelStates = null;
        init.InputState = false;
    }

    return init;
}
/// <summary>
/// Converts a saved model state into the traditional input-model-state form,
/// laying cohorts and stocks out on a lat/lon grid.
/// </summary>
/// <param name="modelState">Saved state holding one entry per focus cell.</param>
/// <param name="c">Configuration supplying cohort functional group counts.</param>
/// <param name="e">Environment supplying grid extent and the focus-cell list.</param>
/// <returns>A single-element list wrapping the reconstructed state.</returns>
public static List <InputModelState> ConvertModelStates( Madingley.Common.ModelState modelState, Madingley.Common.Configuration c, Madingley.Common.Environment e)
{
    var latCellCount = (UInt32)((e.TopLatitude - e.BottomLatitude) / e.CellSize);
    var lonCellCount = (UInt32)((e.RightmostLongitude - e.LeftmostLongitude) / e.CellSize);

    // Set up a grid of grid cells.
    var cohortGrid = new GridCellCohortHandler[latCellCount, lonCellCount];
    var stockGrid = new GridCellStockHandler[latCellCount, lonCellCount];

    // Seed cell (0,0) with an empty handler sized by the cohort definitions.
    cohortGrid[0, 0] = new GridCellCohortHandler(c.CohortFunctionalGroupDefinitions.Data.Count());

    var focusCells = e.FocusCells.ToArray();
    var savedCells = modelState.GridCells.ToArray();

    // Saved cells correspond positionally to the focus-cell list.
    for (var cellIndex = 0; cellIndex < focusCells.Length; cellIndex++)
    {
        var savedCell = savedCells[cellIndex];
        var target = focusCells[cellIndex];

        cohortGrid[target.Item1, target.Item2] = ConvertCohorts(savedCell.Cohorts);
        stockGrid[target.Item1, target.Item2] = ConvertStocks(savedCell.Stocks);
    }

    return new List<InputModelState>() { new InputModelState(cohortGrid, stockGrid) };
}
/// <summary>
/// Creates the output wrapper: converts the output settings into a model
/// initialisation, wires up per-cell, global and cross-cell process trackers,
/// and constructs the underlying model.
/// </summary>
/// <param name="outputFilesSuffix">Suffix appended to output file names.</param>
/// <param name="environment">Grid environment (supplies the focus cells).</param>
/// <param name="configuration">Run configuration.</param>
/// <param name="outputSettings">Output-related initialisation settings.</param>
/// <param name="modelState">Optional saved state to resume from.</param>
public MadingleyModelOutput( string outputFilesSuffix, Madingley.Common.Environment environment, Madingley.Common.Configuration configuration, MadingleyModelInitialisation outputSettings, Madingley.Common.ModelState modelState)
{
    var initialisation = Converters.ConvertInitialisation(
        outputSettings,
        configuration,
        environment);

    // One process-tracker array per focus cell, each holding a single tracker.
    var cellCount = environment.FocusCells.Count();
    this.ProcessTracker = new Madingley.Common.IProcessTracker[cellCount][];
    for (var cell = 0; cell < cellCount; cell++)
    {
        this.ProcessTracker[cell] = new Madingley.Common.IProcessTracker[] { new GEMProcessTracker(cell, this) };
    }

    // Single global and cross-cell trackers, each in a one-element array.
    this.GlobalProcessTracker = new Madingley.Common.IGlobalProcessTracker[] { new GEMGlobalProcessTracker(this) };
    this.CrossCellProcessTracker = new Madingley.Common.ICrossCellProcessTracker[] { new GEMCrossCellProcessTracker(this) };

    this.model = new MadingleyModel(
        initialisation,
        outputFilesSuffix,
        configuration.Simulation,
        modelState);
}
/// <summary>
/// Builds a model initialisation for the output pathway, combining the common
/// configuration/environment with output-specific settings.
/// </summary>
/// <param name="outputSettings">Output options (paths, tracking flags, bins).</param>
/// <param name="configuration">Run configuration.</param>
/// <param name="e">Grid environment.</param>
/// <returns>A populated <see cref="MadingleyModelInitialisation"/>.</returns>
public static MadingleyModelInitialisation ConvertInitialisation( MadingleyModelInitialisation outputSettings, Madingley.Common.Configuration configuration, Madingley.Common.Environment e)
{
    var result = new MadingleyModelInitialisation();

    // Timing and grid geometry from the configuration/environment.
    result.GlobalModelTimeStepUnit = configuration.GlobalModelTimeStepUnit;
    result.NumTimeSteps = (uint)configuration.NumTimeSteps;
    result.CellSize = (float)e.CellSize;
    result.BottomLatitude = (float)e.BottomLatitude;
    result.TopLatitude = (float)e.TopLatitude;
    result.LeftmostLongitude = (float)e.LeftmostLongitude;
    result.RightmostLongitude = (float)e.RightmostLongitude;
    result.PlanktonDispersalThreshold = outputSettings.PlanktonDispersalThreshold;
    result.SpecificLocations = e.SpecificLocations;

    // Only the output-detail string is carried over from the output settings.
    result.InitialisationFileStrings = new SortedList<string, string>();
    result.InitialisationFileStrings.Add("OutputDetail", outputSettings.InitialisationFileStrings["OutputDetail"]);

    result.CohortFunctionalGroupDefinitions = ConvertFunctionalGroupDefinitions(configuration.CohortFunctionalGroupDefinitions);
    result.StockFunctionalGroupDefinitions = ConvertFunctionalGroupDefinitions(configuration.StockFunctionalGroupDefinitions);

    // Copy each cell's environment into a fresh sorted list.
    result.EnviroStack = e.CellEnvironment
        .Select(cellEnvironment => new SortedList<string, double[]>(cellEnvironment))
        .ToArray();

    result.CellList = e.FocusCells
        .Select(cell => new UInt32[] { (UInt32)cell.Item1, (UInt32)cell.Item2 })
        .ToList();

    // Remaining output options pass straight through.
    result.OutputPath = outputSettings.OutputPath;
    result.TrackProcesses = outputSettings.TrackProcesses;
    result.TrackCrossCellProcesses = outputSettings.TrackCrossCellProcesses;
    result.TrackGlobalProcesses = outputSettings.TrackGlobalProcesses;
    result.ProcessTrackingOutputs = outputSettings.ProcessTrackingOutputs;
    result.ModelMassBins = outputSettings.ModelMassBins;
    result.LiveOutputs = outputSettings.LiveOutputs;
    result.TrackMarineSpecifics = outputSettings.TrackMarineSpecifics;
    result.OutputMetrics = outputSettings.OutputMetrics;
    result.OutputStateTimestep = outputSettings.OutputStateTimestep;

    return result;
}
/// <summary>
/// Runs a simulation synchronously with default progress reporting and no
/// cancellation, draining the run sequence to completion.
/// </summary>
/// <param name="configuration">Run configuration.</param>
/// <param name="environment">Grid environment.</param>
/// <param name="factory">Factory producing the output sink for the run.</param>
public static void RunTraditional( Madingley.Common.Configuration configuration, Madingley.Common.Environment environment, Func <Madingley.Common.RunState, Madingley.Common.Configuration, Madingley.Common.Environment, Madingley.Common.ModelState, Madingley.Common.IOutput> factory)
{
    var reporter = new ProgressReporter();

    // Last() forces full enumeration, i.e. runs the simulation to the end.
    Run<Object>(null, configuration, environment, factory, reporter, CancellationToken.None).Last();
}
/// <summary>
/// Saves a configuration as the traditional set of CSV/definition files under
/// <paramref name="directory"/>: simulation control parameters, file locations,
/// ecological parameters, functional group definitions and scenarios.
/// </summary>
/// <param name="directory">Root output directory (subdirectories are created).</param>
/// <param name="c">Configuration to persist.</param>
public static void Save(string directory, Madingley.Common.Configuration c)
{
    CreateDirectories(directory);

    // Time-step counts are stored per model step; the CSV reports years.
    // NOTE(review): assumes 12 steps per year — confirm against GlobalModelTimeStepUnit.
    var convertTimeSteps = 12;

    var parameters = new Tuple<string, string>[]
    {
        Tuple.Create("Parameter", "Value"),
        Tuple.Create("Timestep Units", c.GlobalModelTimeStepUnit),
        Tuple.Create("Length of simulation (years)", (c.NumTimeSteps / convertTimeSteps).ToString()),
        Tuple.Create("Burn-in (years)", (c.BurninTimeSteps / convertTimeSteps).ToString()),
        Tuple.Create("Impact duration (years)", (c.ImpactTimeSteps / convertTimeSteps).ToString()),
        Tuple.Create("Recovery duration (years)", (c.RecoveryTimeSteps / convertTimeSteps).ToString()),
        Tuple.Create("Plankton size threshold", c.PlanktonDispersalThreshold.ToString()),
        Tuple.Create("Draw Randomly", c.DrawRandomly ? "yes" : "no"),
        Tuple.Create("Extinction Threshold", c.ExtinctionThreshold.ToString()),
        Tuple.Create("Maximum Number Of Cohorts", c.MaxNumberOfCohorts.ToString()),
        Tuple.Create("Run Cells In Parallel", c.RunCellsInParallel ? "yes" : "no"),
        Tuple.Create("Run Simulations In Parallel", c.RunSimulationsInParallel ? "yes" : "no"),
        Tuple.Create("Run Single Realm", c.RunRealm),
        Tuple.Create("Impact Cell Index", System.String.Join(";", c.ImpactCellIndices)),
        // NOTE(review): "ImpactAll" is deliberately not written here; if it is
        // ever needed, add: Tuple.Create("ImpactAll", c.ImpactAll ? "yes" : "no")
        Tuple.Create("Dispersal only", c.DispersalOnly ? "yes" : "no"),
        Tuple.Create("Dispersal only type", c.DispersalOnlyType)
    };
    WritePairsCsv(Path.Combine(directory, "SimulationControlParameters.csv"), parameters);

    // File-location parameters point the loader at the definition files below.
    var fileLocationParameters = new Tuple<string, string>[]
    {
        Tuple.Create("Parameter", "Value"),
        Tuple.Create("Mass Bin Filename", "MassBinDefinitions.csv"),
        Tuple.Create("Environmental Data File", "EnvironmentalDataLayers.csv"),
        Tuple.Create("Cohort Functional Group Definitions File", "CohortFunctionalGroupDefinitions.csv"),
        Tuple.Create("Stock Functional Group Definitions File", "StockFunctionalGroupDefinitions.csv"),
        Tuple.Create("Ecological parameters file", "EcologicalParameters.csv")
    };
    WritePairsCsv(Path.Combine(directory, "FileLocationParameters.csv"), fileLocationParameters);

    EcologicalParameters.Save(c.EcologicalParameters, Path.Combine(directory, "Ecological Definition Files", "EcologicalParameters.csv"));
    FunctionalGroupDefinitions.Save(c.CohortFunctionalGroupDefinitions, Path.Combine(directory, "Ecological Definition Files", "CohortFunctionalGroupDefinitions.csv"));
    FunctionalGroupDefinitions.Save(c.StockFunctionalGroupDefinitions, Path.Combine(directory, "Ecological Definition Files", "StockFunctionalGroupDefinitions.csv"));
    ScenarioParameters.SaveScenarios(c.ScenarioParameters, Path.Combine(directory, "Initial Model State Setup", "Scenarios.csv"));
}

/// <summary>
/// Writes name/value pairs as "Item1,Item2" lines to <paramref name="fileName"/>,
/// overwriting any existing file.
/// </summary>
private static void WritePairsCsv(string fileName, IEnumerable<Tuple<string, string>> pairs)
{
    using (var writer = new StreamWriter(fileName))
    {
        foreach (var pair in pairs)
        {
            writer.WriteLine(String.Format("{0},{1}", pair.Item1, pair.Item2));
        }
    }
}
/// <summary>
/// Starts (or resumes) a simulation and returns the lazily evaluated sequence
/// of per-step run states; enumerating it drives the simulation forward.
/// </summary>
/// <typeparam name="ReturnType">Per-step payload type yielded by the model.</typeparam>
/// <param name="state">Saved run state to resume from, or null for a fresh run.</param>
/// <param name="configuration">Run configuration.</param>
/// <param name="environment">Grid environment.</param>
/// <param name="factory">Factory producing the output sink for the run.</param>
/// <param name="progress">Progress reporter (fraction complete).</param>
/// <param name="cancellation">Token used to abort the run.</param>
/// <returns>The simulation's step sequence.</returns>
public static IEnumerable <Tuple <Madingley.Common.RunState, ReturnType> > Run <ReturnType>( Madingley.Common.RunState state, Madingley.Common.Configuration configuration, Madingley.Common.Environment environment, Func <Madingley.Common.RunState, Madingley.Common.Configuration, Madingley.Common.Environment, Madingley.Common.ModelState, Madingley.Common.IOutput> factory, IProgress <double> progress, CancellationToken cancellation)
{
    // Resume at the completed-step count when a prior state exists.
    var resuming = state != null;
    var startTimeStep = resuming ? (uint)state.ModelState.TimestepsComplete : (uint)0;
    var initialModelState = resuming ? state.ModelState : null;

    var model = new MadingleyModel(
        initialModelState,
        configuration,
        environment);

    // Give the output factory a snapshot of the state at the starting step.
    var snapshot = model.CreateModelStateData(startTimeStep);
    var output = factory.Invoke(state, configuration, environment, snapshot);

    // Run the simulation.
    return model.Initialise<ReturnType>(startTimeStep, output, progress, cancellation);
}
/// <summary>
/// Deserialises a <see cref="Madingley.Common.Configuration"/> from the JSON
/// format produced by the matching Serialize method. The reader is strict:
/// any property name it does not recognise raises an exception.
/// </summary>
/// <param name="sr">Text reader positioned at the start of the JSON document.</param>
/// <returns>The reconstructed configuration.</returns>
public static Madingley.Common.Configuration Deserialize(TextReader sr)
{
    // Reads one FunctionalGroupDefinition object ("Definitions" strings plus
    // numeric "Properties"). Expects the reader to be on StartObject.
    Func <Newtonsoft.Json.JsonTextReader, Madingley.Common.FunctionalGroupDefinition> JsonReadFunctionalGroupDefinition = (reader) =>
    {
        var ret = new Madingley.Common.FunctionalGroupDefinition();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "Definitions":
                    ret.Definitions = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadString);
                    break;
                case "Properties":
                    ret.Properties = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadDouble);
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return(ret);
    };

    // Reads a FunctionalGroupDefinitions container: the per-group "Data" array
    // plus the "Definitions"/"Properties" name lists.
    Func <Newtonsoft.Json.JsonTextReader, Madingley.Common.FunctionalGroupDefinitions> JsonReadFunctionalGroupDefinitions = (reader) =>
    {
        var ret = new Madingley.Common.FunctionalGroupDefinitions();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "Data":
                    ret.Data = Common.Reader.ReadArray(reader, JsonReadFunctionalGroupDefinition);
                    break;
                case "Definitions":
                    ret.Definitions = Common.Reader.ReadArray(reader, Common.Reader.ReadString);
                    break;
                case "Properties":
                    ret.Properties = Common.Reader.ReadArray(reader, Common.Reader.ReadString);
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return(ret);
    };

    // Reads a single ScenarioParameter (one string plus two doubles).
    Func <Newtonsoft.Json.JsonTextReader, Madingley.Common.ScenarioParameter> JsonReadScenarioParameter = (reader) =>
    {
        var ret = new Madingley.Common.ScenarioParameter();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "ParamString":
                    ret.ParamString = Common.Reader.ReadString(reader);
                    break;
                case "ParamDouble1":
                    ret.ParamDouble1 = Common.Reader.ReadDouble(reader);
                    break;
                case "ParamDouble2":
                    ret.ParamDouble2 = Common.Reader.ReadDouble(reader);
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return(ret);
    };

    // Reads a JSON object as a name -> ScenarioParameter dictionary; every
    // property name becomes a key.
    Func <Newtonsoft.Json.JsonTextReader, IDictionary <string, Madingley.Common.ScenarioParameter> > JsonReadKVPScenarioParameter = (reader) =>
    {
        var ret = new Dictionary <string, Madingley.Common.ScenarioParameter>();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var key = Convert.ToString(reader.Value);
            reader.Read(); // advance to the nested ScenarioParameter object

            var value = JsonReadScenarioParameter(reader);

            ret.Add(key, value);
        }

        return(ret);
    };

    // Reads a ScenarioParameters record: label, simulation number and the
    // nested parameter dictionary.
    Func <Newtonsoft.Json.JsonTextReader, Madingley.Common.ScenarioParameters> JsonReadScenarioParameters = (reader) =>
    {
        var ret = new Madingley.Common.ScenarioParameters();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "Label":
                    ret.Label = Common.Reader.ReadString(reader);
                    break;
                case "SimulationNumber":
                    ret.SimulationNumber = Common.Reader.ReadInt(reader);
                    break;
                case "Parameters":
                    ret.Parameters = JsonReadKVPScenarioParameter(reader);
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return(ret);
    };

    // Reads the EcologicalParameters block (numeric parameters plus the list
    // of recognised time units).
    Func <Newtonsoft.Json.JsonTextReader, Madingley.Common.EcologicalParameters> JsonReadEcologicalParameters = (reader) =>
    {
        var ret = new Madingley.Common.EcologicalParameters();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "Parameters":
                    ret.Parameters = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadDouble);
                    break;
                case "TimeUnits":
                    ret.TimeUnits = Common.Reader.ReadArray(reader, Common.Reader.ReadString);
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return(ret);
    };

    var configuration = new Madingley.Common.Configuration();

    // Top-level document: one object whose properties map 1:1 onto
    // Configuration members.
    using (var reader = new Newtonsoft.Json.JsonTextReader(sr))
    {
        reader.Read();
        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read(); // advance to the property's value token

            switch (property)
            {
                case "GlobalModelTimeStepUnit":
                    configuration.GlobalModelTimeStepUnit = Common.Reader.ReadString(reader);
                    break;
                case "NumTimeSteps":
                    configuration.NumTimeSteps = Common.Reader.ReadInt(reader);
                    break;
                case "BurninTimeSteps":
                    configuration.BurninTimeSteps = Common.Reader.ReadInt(reader);
                    break;
                case "ImpactTimeSteps":
                    configuration.ImpactTimeSteps = Common.Reader.ReadInt(reader);
                    break;
                case "RecoveryTimeSteps":
                    configuration.RecoveryTimeSteps = Common.Reader.ReadInt(reader);
                    break;
                case "RunCellsInParallel":
                    configuration.RunCellsInParallel = Common.Reader.ReadBoolean(reader);
                    break;
                case "RunSimulationsInParallel":
                    configuration.RunSimulationsInParallel = Common.Reader.ReadBoolean(reader);
                    break;
                case "RunRealm":
                    configuration.RunRealm = Common.Reader.ReadString(reader);
                    break;
                case "DrawRandomly":
                    configuration.DrawRandomly = Common.Reader.ReadBoolean(reader);
                    break;
                case "ExtinctionThreshold":
                    configuration.ExtinctionThreshold = Common.Reader.ReadDouble(reader);
                    break;
                case "MaxNumberOfCohorts":
                    configuration.MaxNumberOfCohorts = Common.Reader.ReadInt(reader);
                    break;
                case "DispersalOnly":
                    configuration.DispersalOnly = Common.Reader.ReadBoolean(reader);
                    break;
                case "DispersalOnlyType":
                    configuration.DispersalOnlyType = Common.Reader.ReadString(reader);
                    break;
                case "PlanktonDispersalThreshold":
                    configuration.PlanktonDispersalThreshold = Common.Reader.ReadDouble(reader);
                    break;
                case "CohortFunctionalGroupDefinitions":
                    configuration.CohortFunctionalGroupDefinitions = JsonReadFunctionalGroupDefinitions(reader);
                    break;
                case "StockFunctionalGroupDefinitions":
                    configuration.StockFunctionalGroupDefinitions = JsonReadFunctionalGroupDefinitions(reader);
                    break;
                case "ImpactCellIndices":
                    configuration.ImpactCellIndices = Common.Reader.ReadArray(reader, Common.Reader.ReadInt);
                    break;
                case "ImpactAll":
                    configuration.ImpactAll = Common.Reader.ReadBoolean(reader);
                    break;
                case "ScenarioParameters":
                    configuration.ScenarioParameters = Common.Reader.ReadArray(reader, JsonReadScenarioParameters).ToList();
                    break;
                case "ScenarioIndex":
                    configuration.ScenarioIndex = Common.Reader.ReadInt(reader);
                    break;
                case "Simulation":
                    configuration.Simulation = Common.Reader.ReadInt(reader);
                    break;
                case "EcologicalParameters":
                    configuration.EcologicalParameters = JsonReadEcologicalParameters(reader);
                    break;
                case "FileNames":
                    configuration.FileNames = Common.Reader.ReadArray(reader, Common.Reader.ReadString).ToList();
                    break;
                default:
                    throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }
    }

    return(configuration);
}
/// <summary>
/// Serialises a <see cref="Madingley.Common.Configuration"/> to indented JSON.
/// The property names written here must match those expected by the matching
/// Deserialize method.
/// </summary>
/// <param name="configuration">Configuration to serialise.</param>
/// <param name="textWriter">Destination writer.</param>
public static void Serialize(Madingley.Common.Configuration configuration, TextWriter textWriter)
{
    // Writes one FunctionalGroupDefinition as an object with "Definitions"
    // (string map) and "Properties" (double map).
    Action <Newtonsoft.Json.JsonWriter, Madingley.Common.FunctionalGroupDefinition> JsonAddPropertyFunctionalGroupDefinition = (JsonWriter, value) =>
    {
        JsonWriter.WriteStartObject();
        Common.Writer.PropertyKeyValuePairs(JsonWriter, "Definitions", value.Definitions, Common.Writer.PropertyString);
        Common.Writer.PropertyKeyValuePairs(JsonWriter, "Properties", value.Properties, Common.Writer.PropertyDouble);
        JsonWriter.WriteEndObject();
    };

    // Writes a named FunctionalGroupDefinitions container: per-group "Data"
    // array plus the "Definitions"/"Properties" name lists.
    Action <Newtonsoft.Json.JsonWriter, string, Madingley.Common.FunctionalGroupDefinitions> JsonAddPropertyFunctionalGroupDefinitions = (JsonWriter, name, value) =>
    {
        JsonWriter.WritePropertyName(name);
        JsonWriter.WriteStartObject();
        Common.Writer.PropertyArray(JsonWriter, "Data", value.Data, JsonAddPropertyFunctionalGroupDefinition);
        Common.Writer.PropertyInlineArray(JsonWriter, "Definitions", value.Definitions, Common.Writer.WriteString);
        Common.Writer.PropertyInlineArray(JsonWriter, "Properties", value.Properties, Common.Writer.WriteString);
        JsonWriter.WriteEndObject();
    };

    // Writes a named ScenarioParameter (one string plus two doubles).
    Action <Newtonsoft.Json.JsonWriter, string, Madingley.Common.ScenarioParameter> JsonAddPropertyScenarioParameter = (JsonWriter, name, value) =>
    {
        JsonWriter.WritePropertyName(name);
        JsonWriter.WriteStartObject();
        Common.Writer.PropertyString(JsonWriter, "ParamString", value.ParamString);
        Common.Writer.PropertyDouble(JsonWriter, "ParamDouble1", value.ParamDouble1);
        Common.Writer.PropertyDouble(JsonWriter, "ParamDouble2", value.ParamDouble2);
        JsonWriter.WriteEndObject();
    };

    // Writes one ScenarioParameters record (label, simulation number, nested
    // parameter dictionary).
    Action <Newtonsoft.Json.JsonWriter, Madingley.Common.ScenarioParameters> JsonAddScenarioParameter = (JsonWriter, value) =>
    {
        JsonWriter.WriteStartObject();
        Common.Writer.PropertyString(JsonWriter, "Label", value.Label);
        Common.Writer.PropertyInt(JsonWriter, "SimulationNumber", value.SimulationNumber);
        Common.Writer.PropertyKeyValuePairs(JsonWriter, "Parameters", value.Parameters, JsonAddPropertyScenarioParameter);
        JsonWriter.WriteEndObject();
    };

    // Writes the named EcologicalParameters block (numeric parameters plus
    // time-unit names).
    Action <Newtonsoft.Json.JsonWriter, string, Madingley.Common.EcologicalParameters> JsonAddEcologicalParameters = (JsonWriter, name, ecologicalParameters) =>
    {
        JsonWriter.WritePropertyName(name);
        JsonWriter.WriteStartObject();
        Common.Writer.PropertyKeyValuePairs(JsonWriter, "Parameters", ecologicalParameters.Parameters, Common.Writer.PropertyDouble);
        Common.Writer.PropertyInlineArray(JsonWriter, "TimeUnits", ecologicalParameters.TimeUnits, Common.Writer.WriteString);
        JsonWriter.WriteEndObject();
    };

    // Top-level document: a single object whose properties map 1:1 onto
    // Configuration members, written in a fixed order.
    using (var writer = new Newtonsoft.Json.JsonTextWriter(textWriter))
    {
        writer.Formatting = Newtonsoft.Json.Formatting.Indented;

        writer.WriteStartObject();
        Common.Writer.PropertyString(writer, "GlobalModelTimeStepUnit", configuration.GlobalModelTimeStepUnit);
        Common.Writer.PropertyInt(writer, "NumTimeSteps", configuration.NumTimeSteps);
        Common.Writer.PropertyInt(writer, "BurninTimeSteps", configuration.BurninTimeSteps);
        Common.Writer.PropertyInt(writer, "ImpactTimeSteps", configuration.ImpactTimeSteps);
        Common.Writer.PropertyInt(writer, "RecoveryTimeSteps", configuration.RecoveryTimeSteps);
        Common.Writer.PropertyBoolean(writer, "RunCellsInParallel", configuration.RunCellsInParallel);
        Common.Writer.PropertyBoolean(writer, "RunSimulationsInParallel", configuration.RunSimulationsInParallel);
        Common.Writer.PropertyString(writer, "RunRealm", configuration.RunRealm);
        Common.Writer.PropertyBoolean(writer, "DrawRandomly", configuration.DrawRandomly);
        Common.Writer.PropertyDouble(writer, "ExtinctionThreshold", configuration.ExtinctionThreshold);
        Common.Writer.PropertyInt(writer, "MaxNumberOfCohorts", configuration.MaxNumberOfCohorts);
        Common.Writer.PropertyBoolean(writer, "DispersalOnly", configuration.DispersalOnly);
        Common.Writer.PropertyString(writer, "DispersalOnlyType", configuration.DispersalOnlyType);
        Common.Writer.PropertyDouble(writer, "PlanktonDispersalThreshold", configuration.PlanktonDispersalThreshold);
        JsonAddPropertyFunctionalGroupDefinitions(writer, "CohortFunctionalGroupDefinitions", configuration.CohortFunctionalGroupDefinitions);
        JsonAddPropertyFunctionalGroupDefinitions(writer, "StockFunctionalGroupDefinitions", configuration.StockFunctionalGroupDefinitions);
        Common.Writer.PropertyInlineArray(writer, "ImpactCellIndices", configuration.ImpactCellIndices, Common.Writer.WriteInt);
        Common.Writer.PropertyBoolean(writer, "ImpactAll", configuration.ImpactAll);
        Common.Writer.PropertyArray(writer, "ScenarioParameters", configuration.ScenarioParameters, JsonAddScenarioParameter);
        Common.Writer.PropertyInt(writer, "ScenarioIndex", configuration.ScenarioIndex);
        Common.Writer.PropertyInt(writer, "Simulation", configuration.Simulation);
        JsonAddEcologicalParameters(writer, "EcologicalParameters", configuration.EcologicalParameters);
        Common.Writer.PropertyInlineArray(writer, "FileNames", configuration.FileNames, Common.Writer.WriteString);
        writer.WriteEndObject();
    }
}
/// <summary>
/// Reads the initialization file to get information for the set of simulations to be run.
/// </summary>
/// <param name="simulationInitialisationFilename">The name of the initialization file with information on the simulations to be run</param>
/// <param name="definitionsFilename">Definitions file name</param>
/// <param name="inputPath">The path to folder which contains the inputs</param>
public static Madingley.Common.Configuration Load(string simulationInitialisationFilename, string definitionsFilename, string inputPath)
{
    // Read the intialisation files and copy them to the output directory

    // Construct file names
    var simulationInitialisationFileName = System.IO.Path.Combine(inputPath, simulationInitialisationFilename);
    var definitionsFileName = System.IO.Path.Combine(inputPath, definitionsFilename);

    var configuration = new Madingley.Common.Configuration();
    // Record the source files so they can be copied alongside the outputs.
    configuration.FileNames.Add(simulationInitialisationFileName);
    configuration.FileNames.Add(definitionsFileName);

    // First pass: simulation control parameters, one "name,value" per line.
    using (var reader = new StreamReader(simulationInitialisationFileName))
    {
        // Discard the header
        var line = reader.ReadLine();
        var headers = line.Split(new char[] { ',' }, 2); // NOTE(review): headers are read but never used

        while (!reader.EndOfStream)
        {
            line = reader.ReadLine();

            // Split fields by commas (at most 2 fields, so values may contain commas)
            var fields = line.Split(new char[] { ',' }, 2);

            // Switch based on the name of the parameter, and write the value to the appropriate field
            switch (fields[0].ToLower())
            {
                case "timestep units":
                    configuration.GlobalModelTimeStepUnit = fields[1];
                    break;
                // Durations given in years are converted to model time steps
                // using the time-step unit read above (so "timestep units"
                // must appear earlier in the file than these rows).
                case "length of simulation (years)":
                    configuration.NumTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "burn-in (years)":
                    configuration.BurninTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "impact duration (years)":
                    configuration.ImpactTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "recovery duration (years)":
                    configuration.RecoveryTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                // Alternative to "length of simulation (years)": a raw step count.
                case "number timesteps":
                    configuration.NumTimeSteps = Convert.ToInt32(fields[1]);
                    break;
                case "run cells in parallel":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.RunCellsInParallel = true;
                            break;
                        case "no":
                            configuration.RunCellsInParallel = false;
                            break;
                    }
                    break;
                case "run simulations in parallel":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.RunSimulationsInParallel = true;
                            break;
                        case "no":
                            configuration.RunSimulationsInParallel = false;
                            break;
                    }
                    break;
                case "run single realm":
                    configuration.RunRealm = fields[1].ToLower();
                    break;
                case "draw randomly":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.DrawRandomly = true;
                            break;
                        case "no":
                            configuration.DrawRandomly = false;
                            break;
                    }
                    break;
                case "extinction threshold":
                    configuration.ExtinctionThreshold = Convert.ToDouble(fields[1]);
                    break;
                case "maximum number of cohorts":
                    configuration.MaxNumberOfCohorts = Convert.ToInt32(fields[1]);
                    break;
                case "impact cell index":
                    // Semicolon-separated list; each item is either a single
                    // index or an inclusive "low-high" range.
                    if (fields[1] != "")
                    {
                        var impactCellIndices = new List <int>();

                        string[] temp = fields[1].Split(new char[] { ';' });
                        foreach (string t in temp)
                        {
                            if (t.Split(new char[] { '-' }).Length > 1)
                            {
                                // Expand the "low-high" range into individual indices.
                                string[] range = t.Split(new char[] { '-' });
                                for (int ii = Convert.ToInt32(range[0]); ii <= Convert.ToInt32(range[1]); ii++)
                                {
                                    impactCellIndices.Add(ii);
                                }
                            }
                            else
                            {
                                impactCellIndices.Add(Convert.ToInt32(t));
                            }
                        }

                        configuration.ImpactCellIndices = impactCellIndices;
                    }
                    break;
                case "dispersal only":
                    // NOTE(review): case-sensitive comparison, unlike the
                    // other yes/no options above — confirm intended.
                    if (fields[1] == "yes")
                    {
                        configuration.DispersalOnly = true;
                    }
                    else
                    {
                        configuration.DispersalOnly = false;
                    }
                    break;
                case "dispersal only type":
                    configuration.DispersalOnlyType = fields[1];
                    break;
                case "plankton size threshold":
                    configuration.PlanktonDispersalThreshold = Convert.ToDouble(fields[1]);
                    break;
            }
        }
    }

    // Read in the definitions data
    using (var reader = new StreamReader(definitionsFileName))
    {
        // Discard the header
        var line = reader.ReadLine();
        var headers = line.Split(new char[] { ',' }, 2); // NOTE(review): headers are read but never used

        while (!reader.EndOfStream)
        {
            line = reader.ReadLine();

            // Split fields by commas
            var fields = line.Split(new char[] { ',' }, 2);

            // Switch based on the name of the parameter, and write the value to the appropriate field
            switch (fields[0].ToLower())
            {
                case "cohort functional group definitions file":
                {
                    Console.WriteLine("Reading functional group definitions...\n");

                    // Open the specified csv file and set up the cohort functional group definitions
                    var functionalDefinitionsFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", functionalDefinitionsFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.CohortFunctionalGroupDefinitions = FunctionalGroupDefinitionsSerialization.Load(fileName);
                }
                break;
                case "stock functional group definitions file":
                {
                    // Open the specified csv file and set up the stock functional group definitions
                    var functionalDefinitionsFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", functionalDefinitionsFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.StockFunctionalGroupDefinitions = FunctionalGroupDefinitionsSerialization.Load(fileName);
                }
                break;
                case "ecological parameters file":
                {
                    var parametersFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", parametersFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.EcologicalParameters = EcologicalParameters.Load(fileName);
                }
                break;
            }
        }
    }

    return(configuration);
}
/// <summary>
/// Reads the initialization file to get information for the set of simulations to be run.
/// NOTE(review): near-duplicate of the other Load overload in this SOURCE
/// (differing only in brace style and return syntax) — consider consolidating.
/// </summary>
/// <param name="simulationInitialisationFilename">The name of the initialization file with information on the simulations to be run</param>
/// <param name="definitionsFilename">Definitions file name</param>
/// <param name="inputPath">The path to folder which contains the inputs</param>
public static Madingley.Common.Configuration Load(string simulationInitialisationFilename, string definitionsFilename, string inputPath)
{
    // Read the intialisation files and copy them to the output directory

    // Construct file names
    var simulationInitialisationFileName = System.IO.Path.Combine(inputPath, simulationInitialisationFilename);
    var definitionsFileName = System.IO.Path.Combine(inputPath, definitionsFilename);

    var configuration = new Madingley.Common.Configuration();
    // Record the source files so they can be copied alongside the outputs.
    configuration.FileNames.Add(simulationInitialisationFileName);
    configuration.FileNames.Add(definitionsFileName);

    // First pass: simulation control parameters, one "name,value" per line.
    using (var reader = new StreamReader(simulationInitialisationFileName))
    {
        // Discard the header
        var line = reader.ReadLine();
        var headers = line.Split(new char[] { ',' }, 2); // NOTE(review): headers are read but never used

        while (!reader.EndOfStream)
        {
            line = reader.ReadLine();

            // Split fields by commas (at most 2 fields, so values may contain commas)
            var fields = line.Split(new char[] { ',' }, 2);

            // Switch based on the name of the parameter, and write the value to the appropriate field
            switch (fields[0].ToLower())
            {
                case "timestep units":
                    configuration.GlobalModelTimeStepUnit = fields[1];
                    break;
                // Durations given in years are converted to model time steps
                // using the time-step unit read above (so "timestep units"
                // must appear earlier in the file than these rows).
                case "length of simulation (years)":
                    configuration.NumTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "burn-in (years)":
                    configuration.BurninTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "impact duration (years)":
                    configuration.ImpactTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                case "recovery duration (years)":
                    configuration.RecoveryTimeSteps = (int)Utilities.ConvertTimeUnits("year", configuration.GlobalModelTimeStepUnit) * Convert.ToInt32(fields[1]);
                    break;
                // Alternative to "length of simulation (years)": a raw step count.
                case "number timesteps":
                    configuration.NumTimeSteps = Convert.ToInt32(fields[1]);
                    break;
                case "run cells in parallel":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.RunCellsInParallel = true;
                            break;
                        case "no":
                            configuration.RunCellsInParallel = false;
                            break;
                    }
                    break;
                case "run simulations in parallel":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.RunSimulationsInParallel = true;
                            break;
                        case "no":
                            configuration.RunSimulationsInParallel = false;
                            break;
                    }
                    break;
                case "run single realm":
                    configuration.RunRealm = fields[1].ToLower();
                    break;
                case "draw randomly":
                    switch (fields[1].ToLower())
                    {
                        case "yes":
                            configuration.DrawRandomly = true;
                            break;
                        case "no":
                            configuration.DrawRandomly = false;
                            break;
                    }
                    break;
                case "extinction threshold":
                    configuration.ExtinctionThreshold = Convert.ToDouble(fields[1]);
                    break;
                case "maximum number of cohorts":
                    configuration.MaxNumberOfCohorts = Convert.ToInt32(fields[1]);
                    break;
                case "impact cell index":
                    // Semicolon-separated list; each item is either a single
                    // index or an inclusive "low-high" range.
                    if (fields[1] != "")
                    {
                        var impactCellIndices = new List<int>();

                        string[] temp = fields[1].Split(new char[] { ';' });
                        foreach (string t in temp)
                        {
                            if (t.Split(new char[] { '-' }).Length > 1)
                            {
                                // Expand the "low-high" range into individual indices.
                                string[] range = t.Split(new char[] { '-' });
                                for (int ii = Convert.ToInt32(range[0]); ii <= Convert.ToInt32(range[1]); ii++)
                                {
                                    impactCellIndices.Add(ii);
                                }
                            }
                            else
                            {
                                impactCellIndices.Add(Convert.ToInt32(t));
                            }
                        }

                        configuration.ImpactCellIndices = impactCellIndices;
                    }
                    break;
                case "dispersal only":
                    // NOTE(review): case-sensitive comparison, unlike the
                    // other yes/no options above — confirm intended.
                    if (fields[1] == "yes") configuration.DispersalOnly = true;
                    else configuration.DispersalOnly = false;
                    break;
                case "dispersal only type":
                    configuration.DispersalOnlyType = fields[1];
                    break;
                case "plankton size threshold":
                    configuration.PlanktonDispersalThreshold = Convert.ToDouble(fields[1]);
                    break;
            }
        }
    }

    // Read in the definitions data
    using (var reader = new StreamReader(definitionsFileName))
    {
        // Discard the header
        var line = reader.ReadLine();
        var headers = line.Split(new char[] { ',' }, 2); // NOTE(review): headers are read but never used

        while (!reader.EndOfStream)
        {
            line = reader.ReadLine();

            // Split fields by commas
            var fields = line.Split(new char[] { ',' }, 2);

            // Switch based on the name of the parameter, and write the value to the appropriate field
            switch (fields[0].ToLower())
            {
                case "cohort functional group definitions file":
                {
                    Console.WriteLine("Reading functional group definitions...\n");

                    // Open the specified csv file and set up the cohort functional group definitions
                    var functionalDefinitionsFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", functionalDefinitionsFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.CohortFunctionalGroupDefinitions = FunctionalGroupDefinitionsSerialization.Load(fileName);
                }
                break;
                case "stock functional group definitions file":
                {
                    // Open the specified csv file and set up the stock functional group definitions
                    var functionalDefinitionsFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", functionalDefinitionsFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.StockFunctionalGroupDefinitions = FunctionalGroupDefinitionsSerialization.Load(fileName);
                }
                break;
                case "ecological parameters file":
                {
                    var parametersFileName = fields[1];
                    var fileName = System.IO.Path.Combine(inputPath, "Ecological Definition Files", parametersFileName);
                    configuration.FileNames.Add(fileName);

                    configuration.EcologicalParameters = EcologicalParameters.Load(fileName);
                }
                break;
            }
        }
    }

    return configuration;
}
/// <summary>
/// Deserialize a <see cref="Madingley.Common.Configuration"/> from a JSON stream.
/// The JSON is read with a hand-rolled token-pull parser (Newtonsoft.Json
/// JsonTextReader) rather than a contract serializer; each nested object type
/// has its own local reader lambda below. Any property name not recognised at
/// any level throws, so the input schema must match exactly.
/// </summary>
/// <param name="sr">Text reader positioned at the start of the JSON document.</param>
/// <returns>The populated configuration object.</returns>
/// <exception cref="Exception">Thrown when an unexpected property name is encountered.</exception>
public static Madingley.Common.Configuration Deserialize(TextReader sr)
{
    // Reads a single FunctionalGroupDefinition object: { "Definitions": {...}, "Properties": {...} }.
    // Precondition (asserted): reader is positioned on StartObject.
    Func<Newtonsoft.Json.JsonTextReader, Madingley.Common.FunctionalGroupDefinition> JsonReadFunctionalGroupDefinition = (reader) =>
    {
        var ret = new Madingley.Common.FunctionalGroupDefinition();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            // Advance past the property name onto its value token before dispatching.
            reader.Read();

            switch (property)
            {
                case "Definitions": ret.Definitions = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadString); break;
                case "Properties": ret.Properties = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadDouble); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return ret;
    };

    // Reads a FunctionalGroupDefinitions object: an array of definitions ("Data")
    // plus the lists of definition and property names.
    Func<Newtonsoft.Json.JsonTextReader, Madingley.Common.FunctionalGroupDefinitions> JsonReadFunctionalGroupDefinitions = (reader) =>
    {
        var ret = new Madingley.Common.FunctionalGroupDefinitions();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read();

            switch (property)
            {
                case "Data": ret.Data = Common.Reader.ReadArray(reader, JsonReadFunctionalGroupDefinition); break;
                case "Definitions": ret.Definitions = Common.Reader.ReadArray(reader, Common.Reader.ReadString); break;
                case "Properties": ret.Properties = Common.Reader.ReadArray(reader, Common.Reader.ReadString); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return ret;
    };

    // Reads a single ScenarioParameter: a string plus two doubles.
    Func<Newtonsoft.Json.JsonTextReader, Madingley.Common.ScenarioParameter> JsonReadScenarioParameter = (reader) =>
    {
        var ret = new Madingley.Common.ScenarioParameter();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read();

            switch (property)
            {
                case "ParamString": ret.ParamString = Common.Reader.ReadString(reader); break;
                case "ParamDouble1": ret.ParamDouble1 = Common.Reader.ReadDouble(reader); break;
                case "ParamDouble2": ret.ParamDouble2 = Common.Reader.ReadDouble(reader); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return ret;
    };

    // Reads a JSON object as a dictionary of name -> ScenarioParameter.
    // Unlike the other readers, every property name here is treated as a
    // dictionary key, so there is no switch/throw on unknown names.
    Func<Newtonsoft.Json.JsonTextReader, IDictionary<string, Madingley.Common.ScenarioParameter>> JsonReadKVPScenarioParameter = (reader) =>
    {
        var ret = new Dictionary<string, Madingley.Common.ScenarioParameter>();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var key = Convert.ToString(reader.Value);
            reader.Read();
            var value = JsonReadScenarioParameter(reader);

            ret.Add(key, value);
        }

        return ret;
    };

    // Reads a ScenarioParameters object: label, simulation count, and the
    // nested parameter dictionary.
    Func<Newtonsoft.Json.JsonTextReader, Madingley.Common.ScenarioParameters> JsonReadScenarioParameters = (reader) =>
    {
        var ret = new Madingley.Common.ScenarioParameters();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read();

            switch (property)
            {
                case "Label": ret.Label = Common.Reader.ReadString(reader); break;
                case "SimulationNumber": ret.SimulationNumber = Common.Reader.ReadInt(reader); break;
                case "Parameters": ret.Parameters = JsonReadKVPScenarioParameter(reader); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return ret;
    };

    // Reads an EcologicalParameters object: name -> value pairs plus time units.
    Func<Newtonsoft.Json.JsonTextReader, Madingley.Common.EcologicalParameters> JsonReadEcologicalParameters = (reader) =>
    {
        var ret = new Madingley.Common.EcologicalParameters();

        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read();

            switch (property)
            {
                case "Parameters": ret.Parameters = Common.Reader.ReadKeyValuePairs(reader, Common.Reader.ReadDouble); break;
                case "TimeUnits": ret.TimeUnits = Common.Reader.ReadArray(reader, Common.Reader.ReadString); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }

        return ret;
    };

    var configuration = new Madingley.Common.Configuration();

    // Top-level loop: walk the root object and dispatch each property to the
    // matching primitive reader or nested-object lambda above.
    using (var reader = new Newtonsoft.Json.JsonTextReader(sr))
    {
        reader.Read();
        Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.StartObject);

        while (reader.Read() && reader.TokenType != Newtonsoft.Json.JsonToken.EndObject)
        {
            Debug.Assert(reader.TokenType == Newtonsoft.Json.JsonToken.PropertyName);
            Debug.Assert(reader.ValueType == typeof(string));

            var property = Convert.ToString(reader.Value);
            reader.Read();

            switch (property)
            {
                case "GlobalModelTimeStepUnit": configuration.GlobalModelTimeStepUnit = Common.Reader.ReadString(reader); break;
                case "NumTimeSteps": configuration.NumTimeSteps = Common.Reader.ReadInt(reader); break;
                case "BurninTimeSteps": configuration.BurninTimeSteps = Common.Reader.ReadInt(reader); break;
                case "ImpactTimeSteps": configuration.ImpactTimeSteps = Common.Reader.ReadInt(reader); break;
                case "RecoveryTimeSteps": configuration.RecoveryTimeSteps = Common.Reader.ReadInt(reader); break;
                case "RunCellsInParallel": configuration.RunCellsInParallel = Common.Reader.ReadBoolean(reader); break;
                case "RunSimulationsInParallel": configuration.RunSimulationsInParallel = Common.Reader.ReadBoolean(reader); break;
                case "RunRealm": configuration.RunRealm = Common.Reader.ReadString(reader); break;
                case "DrawRandomly": configuration.DrawRandomly = Common.Reader.ReadBoolean(reader); break;
                case "ExtinctionThreshold": configuration.ExtinctionThreshold = Common.Reader.ReadDouble(reader); break;
                case "MaxNumberOfCohorts": configuration.MaxNumberOfCohorts = Common.Reader.ReadInt(reader); break;
                case "DispersalOnly": configuration.DispersalOnly = Common.Reader.ReadBoolean(reader); break;
                case "DispersalOnlyType": configuration.DispersalOnlyType = Common.Reader.ReadString(reader); break;
                case "PlanktonDispersalThreshold": configuration.PlanktonDispersalThreshold = Common.Reader.ReadDouble(reader); break;
                case "CohortFunctionalGroupDefinitions": configuration.CohortFunctionalGroupDefinitions = JsonReadFunctionalGroupDefinitions(reader); break;
                case "StockFunctionalGroupDefinitions": configuration.StockFunctionalGroupDefinitions = JsonReadFunctionalGroupDefinitions(reader); break;
                case "ImpactCellIndices": configuration.ImpactCellIndices = Common.Reader.ReadArray(reader, Common.Reader.ReadInt); break;
                case "ImpactAll": configuration.ImpactAll = Common.Reader.ReadBoolean(reader); break;
                case "ScenarioParameters": configuration.ScenarioParameters = Common.Reader.ReadArray(reader, JsonReadScenarioParameters).ToList(); break;
                case "ScenarioIndex": configuration.ScenarioIndex = Common.Reader.ReadInt(reader); break;
                case "Simulation": configuration.Simulation = Common.Reader.ReadInt(reader); break;
                case "EcologicalParameters": configuration.EcologicalParameters = JsonReadEcologicalParameters(reader); break;
                case "FileNames": configuration.FileNames = Common.Reader.ReadArray(reader, Common.Reader.ReadString).ToList(); break;
                default: throw new Exception(string.Format("Unexpected property: {0}", property));
            }
        }
    }

    return configuration;
}
/// <summary>
/// Create the model output object for a run: snapshots the input files into a
/// timestamped working directory, builds the output-file suffix from the active
/// scenario and simulation number, and constructs the MadingleyModelOutput.
/// If a prior run state is supplied, its output model is copied into the new one.
/// </summary>
/// <param name="state">Previous run state to resume from, or null for a fresh run.</param>
/// <param name="configuration">Model configuration (file list, scenario set, simulation index).</param>
/// <param name="environment">Model environment (file list used for the snapshot copy).</param>
/// <param name="modelState">Current model state passed through to the output object.</param>
/// <returns>The constructed output object.</returns>
public static Madingley.Common.IOutput Create(
    Madingley.Common.RunState state,
    Madingley.Common.Configuration configuration,
    Madingley.Common.Environment environment,
    Madingley.Common.ModelState modelState)
{
    // Capture the clock once: the original called System.DateTime.Now separately
    // for each component, so a call straddling a second/minute/hour/day boundary
    // could produce a directory name mixing two different instants.
    var now = System.DateTime.Now;

    // Specify the working directory (same unpadded "Parameters{y}-{M}-{d}_{h}.{m}.{s}/" format as before).
    string OutputDir = "Parameters";
    OutputDir += now.Year + "-" + now.Month + "-" + now.Day + "_" +
        now.Hour + "." + now.Minute + "." + now.Second + "/";

    // Create the working directory if this does not already exist
    System.IO.Directory.CreateDirectory(OutputDir);

    // Snapshot the configuration and environment input files alongside the outputs.
    CopyFilesToDirectory(configuration.FileNames, OutputDir);
    CopyFilesToDirectory(environment.FileNames, OutputDir);

    // Set up the suffix for the output files
    var OutputFilesSuffix = "_";

    // Add the scenario label to the suffix for the output files
    OutputFilesSuffix += configuration.ScenarioParameters[configuration.ScenarioIndex].Label + "_";

    // Add the simulation index number to the suffix
    OutputFilesSuffix += configuration.Simulation.ToString();

    // NOTE(review): simulationInitialisationFile, definitionsFilename, outputsFilename,
    // outputPath and inputPath are not defined in this method — presumably fields of the
    // enclosing class; confirm they are initialised before Create is called.
    var i = new MadingleyModelInitialisation(simulationInitialisationFile, definitionsFilename, outputsFilename, outputPath, inputPath);
    i.OutputPath = OutputDir;

    var output = new Madingley.Output.MadingleyModelOutput(
        OutputFilesSuffix,
        environment,
        configuration,
        i,
        modelState);

    if (state != null)
    {
        // Resuming: carry the accumulated output model forward from the previous run.
        var existing = (Madingley.Output.MadingleyModelOutput)state.Output;
        output.model.Copy(existing.model);
    }

    return output;
}

/// <summary>
/// Copy each named file into <paramref name="destinationDir"/>, keeping its file
/// name and overwriting any existing copy. Factored out of the two identical
/// loops in <see cref="Create"/>.
/// </summary>
private static void CopyFilesToDirectory(IEnumerable<string> sourceFileNames, string destinationDir)
{
    foreach (var sourceFileName in sourceFileNames)
    {
        var fileName = System.IO.Path.GetFileName(sourceFileName);
        var destFileName = System.IO.Path.Combine(destinationDir, fileName);
        System.IO.File.Copy(sourceFileName, destFileName, true);
    }
}