/// <summary>
/// Adds a vector of double values to a one-dimensional variable in an SDS object
/// </summary>
/// <param name="dataToConvert">The vector of values to add</param>
/// <param name="outputVariableName">The name of the variable to write to</param>
/// <param name="dimensionName">The name of the dimension variable of the output variable</param>
/// <param name="dimensionValues">The values of the dimension variable</param>
/// <param name="missingValue">The value used for missing data</param>
/// <param name="SDSObject">The SDS object to write to</param>
public void VectorToSDS1D(double[] dataToConvert, string outputVariableName, string dimensionName, float[] dimensionValues, double missingValue, DataSet SDSObject)
{
    // Add the dimension variable if it is not already present.
    // (Inverted from the original empty-then-branch "if (...) { } else { add }" idiom.)
    if (!SDSObject.Variables.Contains(dimensionName))
    {
        SDSObject.AddVariable<float>(dimensionName, dimensionValues, dimensionName);
    }

    if (SDSObject.Variables.Contains(outputVariableName))
    {
        // Variable exists: overwrite its data and commit.
        SDSObject.PutData<double[]>(outputVariableName, dataToConvert);
        SDSObject.Commit();
    }
    else
    {
        // Variable does not exist: create it over the named dimension with the data.
        string[] dimensions = { dimensionName };
        var dataGrid = SDSObject.AddVariable<double>(outputVariableName, dataToConvert, dimensions);

        // Metadata required by SDS consumers (display name and missing-value marker).
        dataGrid.Metadata["DisplayName"] = outputVariableName;
        dataGrid.Metadata["MissingValue"] = missingValue;

        SDSObject.Commit();
    }
}
/// <summary>
/// Processes the request dataset via the remote service: creates a proxy dataset at the
/// service, copies the request into it, commits, and waits until the service signals the
/// commit back. Falls back to in-process handling when the service URI is "(local)".
/// </summary>
/// <param name="ds">The request dataset to process.</param>
/// <returns>The committed proxy dataset (caller takes ownership), or the local-processing result.</returns>
protected override DataSet ServerProcessInternal(DataSet ds)
{
    // "(local)" is a sentinel URI meaning "do not go over the wire at all".
    if (serviceUri == "(local)")
    {
        return (LocalProcess(ds));
    }

    string hash = DataSetDiskCache.ComputeHash(ds);
    DataSet proxyDataSet = null;
    // Creating new DataSet at the service.
    // TODO: fix following:
    try
    {
        try
        {
            // 10 * 60 * 1000 ms = 10-minute timeout for creating the remote in-memory dataset.
            proxyDataSet = ProxyDataSet.CreateProxySync(taskQueue, ServicePort, "msds:memory", false, 10 * 60 * 1000);
        }
        catch (CommunicationObjectFaultedException)
        {
            //Connection to server closed.
            //Recreate service port and try again.
            if (proxyDataSet != null && !proxyDataSet.IsDisposed)
            {
                proxyDataSet.Dispose();
            }
            // Clearing the cached port so the ServicePort getter rebuilds the channel.
            this._servicePort = null;
            proxyDataSet = ProxyDataSet.CreateProxySync(taskQueue, ServicePort, "msds:memory", false, 10 * 60 * 1000);
        }

        // Block this thread until the service acknowledges the commit via the Committed event.
        AutoResetEvent completed = new AutoResetEvent(false);
        OnCommittedHandler onCommitted = new OnCommittedHandler(completed, OnDataSetCommitted);
        // NOTE(review): the handler is never unsubscribed from Committed — potential leak
        // if the proxy outlives this call; confirm intended lifetime.
        proxyDataSet.Committed += onCommitted.Handler;
        proxyDataSet.IsAutocommitEnabled = false;
        FetchClimateRequestBuilder.CopyRequestedDataSet(ds, proxyDataSet, false);
        proxyDataSet.Metadata[Namings.metadataNameHash] = hash;
        proxyDataSet.Commit();
        // NOTE(review): a second conditional Commit immediately after the first looks
        // redundant — presumably a workaround for changes raised by the first commit; verify.
        if (proxyDataSet.HasChanges)
        {
            proxyDataSet.Commit();
        }
        completed.WaitOne();
        proxyDataSet.IsAutocommitEnabled = true;
        return (proxyDataSet);
    }
    catch
    {
        // Dispose the half-constructed proxy on any failure, then rethrow unchanged.
        if (proxyDataSet != null && !proxyDataSet.IsDisposed)
        {
            proxyDataSet.Dispose();
        }
        throw;
    }
}
/// <summary>Creates the dataset schema, seeds the initial state, and runs the first modelling step.</summary>
private void InitModel()
{
    dataset = DataSet.Open("msds:memory");
    dataset.IsAutocommitEnabled = false;

    // Declare all variables up front, then commit the schema in one go.
    foreach (string series in new[] { "sin", "cos", "x" })
    {
        dataset.Add<double[]>(series);
    }
    dataset.Add<double>("phase");
    dataset.Commit();

    // Seed the initial phase and attach a visualization hint, then commit the state.
    dataset.PutData<double>("phase", 0);
    dataset.PutAttr("cos", "VisualHints", "Style:Points(sin,x)");
    dataset.Commit();

    OneStep();
}
public MainWindow()
{
    InitializeComponent();

    // Build an in-memory dataset holding a single two-dimensional variable.
    DataSet ds = DataSet.Open("msds:memory");
    ds.AddVariable<double>("values", "i", "j");

    // Generate nice-looking data: concentric sine waves with a little noise.
    const int Size = 100;
    var values = new double[Size, Size];
    var rand = new Random();
    for (int row = 0; row < Size; row++)
    {
        for (int col = 0; col < Size; col++)
        {
            double dist = Math.Sqrt((row - Size / 2) * (row - Size / 2) + (col - Size / 2) * (col - Size / 2));
            values[row, col] = Math.Sin((dist + rand.NextDouble()) / Math.Sqrt(Size));
        }
    }

    // Store the data and publish the dataset to the viewer control.
    ds.PutData("values", values);
    ds.Commit();
    dsvc.DataSet = ds;

    // Start with the colormap presentation.
    colorMap.IsChecked = true;
}
public MainWindow()
{
    InitializeComponent();

    // Create dataset with three variables sharing one dimension
    DataSet ds = DataSet.Open("msds:memory");
    ds.Add<double[]>("x");
    ds.Add<double[]>("sin");
    ds.Add<double[]>("cos");

    // Populate the dataset. Building whole arrays and issuing a single PutData per
    // variable is much faster than the original per-element Append calls in the loop
    // (the original comment itself noted the Append approach was slow).
    // First pass: count the samples produced by the floating-point loop, so the arrays
    // can be sized exactly without changing the generated x values.
    int count = 0;
    for (double x = 0; x < 2 * Math.PI; x += 0.01)
    {
        count++;
    }
    double[] xs = new double[count];
    double[] sins = new double[count];
    double[] coss = new double[count];
    int k = 0;
    for (double x = 0; x < 2 * Math.PI; x += 0.01)
    {
        xs[k] = x;
        sins[k] = Math.Sin(x);
        coss[k] = Math.Cos(x);
        k++;
    }
    ds.PutData("x", xs);
    ds.PutData("sin", sins);
    ds.PutData("cos", coss);

    // Hint at best visualization for some variable
    ds.PutAttr("sin", "VisualHints", "Style:Polyline(x); Stroke:Orange; Thickness:3");
    ds.PutAttr("cos", "VisualHints", "Style:Markers(x); Marker:Circle; Color:Blue; Size:10");
    ds.Commit();

    // Attach data to visualizer
    dsvc.DataSet = ds;
}
/// <summary>
/// Adds a one-dimensional variable to the specified SDS object with string dimension data
/// </summary>
/// <param name="SDSObject">A reference to an SDS object</param>
/// <param name="variableName">The name of the variable to create</param>
/// <param name="numDimensions">The number of dimensions for the new variable</param>
/// <param name="namesDimensions">A vector of names of the dimensions for the variable</param>
/// <param name="missingValue">The missing value to apply to the new variable</param>
/// <param name="dimension1Data">A string vector of values of the first dimension</param>
public void AddVariable(DataSet SDSObject, string variableName, int numDimensions, string[] namesDimensions, double missingValue, string[] dimension1Data)
{
    // If the wrong number of dimension names has been provided, fail in debug builds
    if (namesDimensions.Length != numDimensions)
    {
        Debug.Fail("Error: you have provided the wrong number of dimension names");
    }
    // Since this overload deals with one-dimensional variables only, flag any other rank
    if (numDimensions != 1)
    {
        Debug.Fail("Error: you have provided data for the wrong number of dimensions");
    }

    // If the variable already exists in the SDS, report and return (early-return replaces
    // the original else-wrapped body; behavior unchanged)
    if (SDSObject.Variables.Contains(variableName))
    {
        // BUG FIX: the original string literal spanned a physical line break, which is
        // invalid in a non-verbatim C# string; collapsed to a single line.
        Console.WriteLine("SDS object already contains a variable with that name. Skipping...");
        return;
    }

    // Create the dimension variable if it does not already exist
    if (!SDSObject.Variables.Contains(namesDimensions[0]))
    {
        SDSObject.AddVariable<string>(namesDimensions[0], dimension1Data, namesDimensions[0]);
    }

    // If the requested variable name coincides with the dimension just added there is
    // nothing left to do; otherwise create it, pre-filled with missing values
    if (!SDSObject.Variables.Contains(variableName))
    {
        // Create array of missing values of the correct length
        double[] tempOutData = new double[dimension1Data.Length];
        for (int ii = 0; ii < dimension1Data.Length; ii++)
        {
            tempOutData[ii] = missingValue;
        }

        // Add variable to SDS
        var testOut = SDSObject.AddVariable<double>(variableName, tempOutData, namesDimensions);

        // Metadata required by SDS
        testOut.Metadata["DisplayName"] = variableName;
        testOut.Metadata["MissingValue"] = missingValue;

        // Commit changes
        SDSObject.Commit();
    }
}
/// <summary>
/// Stores status-check parameters (expected calculation time, request hash, resend flag)
/// in the dataset's global metadata, optionally committing the change.
/// </summary>
/// <param name="ds">Dataset whose metadata is updated.</param>
/// <param name="expectedCalculationTime">Expected calculation time to report.</param>
/// <param name="hash">Hash identifying the request.</param>
/// <param name="resendRequest">Whether the client should resend the request.</param>
/// <param name="commit">If true, commits the dataset after writing the metadata.</param>
public static void FillDataSetWithStatusCheckParams(DataSet ds, int expectedCalculationTime, string hash, bool resendRequest, bool commit)
{
    var metadata = ds.Metadata;
    metadata[Namings.restApiMetadataNameExpectedCalculationTime] = expectedCalculationTime;
    metadata[Namings.restApiMetadataNameHash] = hash;
    metadata[Namings.restApiMetadataNameResendRequest] = resendRequest;

    if (commit)
    {
        ds.Commit();
    }
}
/// <summary>
/// Adds a geographical array of values to a two-dimensional variable in an SDS object
/// </summary>
/// <param name="dataToConvert">The array of values to add</param>
/// <param name="ouputVariableName">The name of the variable to write to</param>
/// <param name="lats">The values of the latitude dimension variable</param>
/// <param name="lons">The values of the longitude dimension variable</param>
/// <param name="missingValue">The value used for missing data</param>
/// <param name="SDSObject">The SDS object to write to</param>
public void Array2DToSDS2D(double[,] dataToConvert, string ouputVariableName, float[] lats, float[] lons, double missingValue, DataSet SDSObject)
{
    // Add the geographic coordinate variables if not already present.
    // (Inverted from the original empty-then-branch "if (...) { } else { add }" idiom.)
    if (!SDSObject.Variables.Contains("Latitude"))
    {
        SDSObject.AddVariable<float>("Latitude", lats, "Lat");
    }
    if (!SDSObject.Variables.Contains("Longitude"))
    {
        SDSObject.AddVariable<float>("Longitude", lons, "Lon");
    }

    if (SDSObject.Variables.Contains(ouputVariableName))
    {
        // Variable exists: overwrite its data and commit.
        SDSObject.PutData<double[,]>(ouputVariableName, dataToConvert);
        SDSObject.Commit();
    }
    else
    {
        // Variable does not exist: create it over the (Lat, Lon) grid with the data.
        string[] dimensions = { "Lat", "Lon" };
        var dataGrid = SDSObject.AddVariable<double>(ouputVariableName, dataToConvert, dimensions);

        // Metadata required by SDS consumers (display name and missing-value marker).
        dataGrid.Metadata["DisplayName"] = ouputVariableName;
        dataGrid.Metadata["MissingValue"] = missingValue;

        SDSObject.Commit();
    }
}
/// <summary>
/// Creates a NetCDF file containing a random 3D grid over azimuth (R), height (Z)
/// and one day of minutes (T), then closes it.
/// </summary>
public void TestMethod1()
{
    // Grid extents.
    int r = 360;
    int z = 126;
    int t = 24 * 60;

    // Coordinate axes.
    int[] R = new int[r];
    for (int a = 0; a < r; a++)
    {
        R[a] = a;
    }
    int[] Z = new int[z];
    for (int h = 0; h < z; h++)
    {
        Z[h] = h;
    }
    DateTime[] T = new DateTime[t];
    for (int m = 0; m < t; m++)
    {
        T[m] = new DateTime(2012, 1, 1).AddMinutes(m);
    }

    // Random payload values in [0, 1024).
    Int16[,,] grid = new Int16[r, z, t];
    Random rnd = new Random();
    for (int a = 0; a < r; a++)
    {
        for (int h = 0; h < z; h++)
        {
            for (int m = 0; m < t; m++)
            {
                grid[a, h, m] = (Int16)rnd.Next(1024);
            }
        }
    }

    // Write everything into a freshly created NetCDF file.
    DataSet ds = DataSet.Open(NetCDFFileName + "?openMode=create");
    ds.Add("R", "degrees", R, "R");
    ds.Add("Z", "distance", Z, "Z");
    ds.Add("T", T, "T");
    ds.Add("values", "count", grid, "R", "Z", "T");
    ds.Commit();
    ds.Dispose();
}
/// <summary>
/// Creates the output dataset structure (coordinate axes plus 4D field variables),
/// fills the spatial axes, initializes the solver, and stores the initial (t = 0) layer.
/// </summary>
/// <param name="constructionString">DataSet construction string identifying the storage to create.</param>
public void Init(string constructionString)
{
    dataSet = DataSet.Create(constructionString); // Initialize the DataSet (translated from Russian)
    Variable X = dataSet.AddVariable<double>("X", "x");
    Variable Y = dataSet.AddVariable<double>("Y", "y");
    Variable Z = dataSet.AddVariable<double>("Z", "z");
    time = dataSet.AddVariable<double>("Time", "t");
    // Field variables over the full (x, y, z, t) grid.
    u = dataSet.AddVariable<double>("U velocity", "x", "y", "z", "t");
    v = dataSet.AddVariable<double>("V velocity", "x", "y", "z", "t");
    w = dataSet.AddVariable<double>("W velocity", "x", "y", "z", "t");
    T = dataSet.AddVariable<double>("Temperature", "x", "y", "z", "t");
    div = dataSet.AddVariable<double>("Divergence", "x", "y", "z", "t");

    // Fill the X axis with uniformly spaced coordinates i * Dx; Y and Z below are analogous
    // (the same scratch array is reused for each axis).
    double[] wArr = new double[modellingParams.Nx];
    for (int i = 0; i < modellingParams.Nx; i++)
    {
        wArr[i] = i * modellingParams.Dx;
    }
    X.PutData(wArr);
    wArr = new double[modellingParams.Ny];
    for (int i = 0; i < modellingParams.Ny; i++)
    {
        wArr[i] = i * modellingParams.Dy;
    }
    Y.PutData(wArr);
    wArr = new double[modellingParams.Nz];
    for (int i = 0; i < modellingParams.Nz; i++)
    {
        wArr[i] = i * modellingParams.Dz;
    }
    Z.PutData(wArr);

    // Initialize the layer solver with the initial conditions (translated from Russian)
    solver = new LayerSolver(prevData, modellingParams);

    // Store the initial layer along the time dimension and mark it as t = 0.
    u.Append(prevData.U.ToArray(), "t");
    v.Append(prevData.V.ToArray(), "t");
    w.Append(prevData.W.ToArray(), "t");
    T.Append(prevData.T.ToArray(), "t");
    div.Append(prevData.Div.ToArray(), "t");
    time.PutData(new double[1] { 0 });
    dataSet.Commit();
}
/// <summary>
/// Writes a single double value into a one-dimensional SDS variable at a given offset
/// along its dimension, then commits.
/// </summary>
/// <param name="dataToConvert">The value to write.</param>
/// <param name="outputVariableName">Name of the target variable.</param>
/// <param name="dimensionName">Name of the target variable's dimension.</param>
/// <param name="missingValue">Missing-data marker (not used by this overload; kept for interface symmetry).</param>
/// <param name="SDSObject">The SDS object to write into.</param>
/// <param name="dimensionOffset">Offset along the dimension at which to place the value.</param>
public void ValueToSDS1D(double dataToConvert, string outputVariableName, string dimensionName, double missingValue, DataSet SDSObject, int dimensionOffset)
{
    // Offset-based writes require both the dimension and the target variable to exist already.
    Debug.Assert(SDSObject.Variables.Contains(dimensionName), "Error: where an offset is included, dimension information must be defined before adding data");
    Debug.Assert(SDSObject.Variables.Contains(outputVariableName), "Error: where an offset is included, the variable must be created before adding data");

    // Write the scalar at the requested offset and persist the change.
    SDSObject.PutData<double>(outputVariableName, dataToConvert, DataSet.ReduceDim(dimensionOffset));
    SDSObject.Commit();
}
/// <summary>
/// Writes a two-dimensional slice into a three-dimensional SDS variable at the given
/// offset along the third dimension, then commits.
/// </summary>
/// <param name="dataToConvert">The 2D slice of values to write.</param>
/// <param name="newVariableName">Name of the target variable (must already exist).</param>
/// <param name="dimensionNames">Names of the three dimensions of the target variable.</param>
/// <param name="thirdDimensionOffset">Offset along the third dimension at which to place the slice.</param>
/// <param name="missingValue">Missing-data marker (not used by this overload; kept for interface symmetry).</param>
/// <param name="SDSObject">The SDS object to write into.</param>
public void Array2DToSDS3D(double[,] dataToConvert, string newVariableName, string[] dimensionNames, int thirdDimensionOffset, double missingValue, DataSet SDSObject)
{
    // Exactly three dimension names are expected for a 3D target variable.
    Debug.Assert(dimensionNames.Length == 3, "There should be three dimension names passed to this method");

    // Offset-based writes require the dimensions and the target variable to exist already.
    for (int d = 0; d < dimensionNames.Length; d++)
    {
        Debug.Assert(SDSObject.Variables.Contains(dimensionNames[d]), "Error: where an offset is included, dimension information must be defined before adding data");
    }
    Debug.Assert(SDSObject.Variables.Contains(newVariableName), "Error: where an offset is included, target variable must be created before adding data");

    // Write the full 2D slice at the requested third-dimension offset and persist.
    SDSObject.PutData<double[,]>(newVariableName, dataToConvert, DataSet.FromToEnd(0), DataSet.FromToEnd(0), DataSet.ReduceDim(thirdDimensionOffset));
    SDSObject.Commit();
}
/// <summary>
/// Creates a NetCDF file declaring a 3D "values" variable over dimensions (x, y, t).
/// </summary>
/// <param name="dfs">The DFS object this extension method is invoked on (currently unused).</param>
/// <param name="NetCDFFileName">Path of the NetCDF file to create.</param>
public static void SaveToNetCDF(this DFSBase dfs, string NetCDFFileName)
{
    double[,,] grid = new double[360, 720, 10];
    double[] x = new double[360];
    double[] y = new double[720];
    double[] t = new double[10];
    for (int k = 0; k < 10; k++)
    {
        for (int i = 0; i < 360; i++)
        {
            x[i] = i;
            for (int j = 0; j < 720; j++)
            {
                y[j] = j;
                // BUG FIX: '^' is bitwise XOR in C#, not exponentiation; the intent
                // of "i ^ 2 + j ^ 2" was clearly i² + j².
                grid[i, j, k] = (double)i * i + (double)j * j;
            }
        }
    }

    // Create the output file and declare the 3D variable.
    DataSet ds = DataSet.Open(NetCDFFileName + "?openMode=create");
    // NOTE(review): "grid" is computed but never written — the Append calls were
    // commented out in the original; confirm whether the data should be stored.
    ds.AddVariable<Int16>("values", "x", "y", "t");
    ds.Commit();
    ds.Dispose();
}
/// <summary>
/// Copies request data (global metadata plus the per-cell request variables) from one
/// dataset into another, creating any missing target variables on demand.
/// </summary>
/// <param name="inDs">Source dataset holding the request.</param>
/// <param name="outDs">Target dataset to receive the request.</param>
/// <param name="commit">If true, commits the target dataset after copying.</param>
public static void CopyRequestedDataSet(DataSet inDs, DataSet outDs, bool commit)
{
    // Copy all global metadata entries.
    foreach (var entry in inDs.Metadata)
        outDs.Metadata[entry.Key] = entry.Value;

    // Request variables grouped by element type. The creation order matches the
    // original hand-written sequence (lat/lon bounds first, then the time fields).
    string[] doubleVars =
    {
        Namings.VarNameLatMax, Namings.VarNameLatMin,
        Namings.VarNameLonMax, Namings.VarNameLonMin
    };
    string[] intVars =
    {
        Namings.VarNameDayMax, Namings.VarNameDayMin,
        Namings.VarNameHourMax, Namings.VarNameHourMin,
        Namings.VarNameYearMax, Namings.VarNameYearMin
    };

    foreach (string name in doubleVars)
    {
        if (!outDs.Variables.Contains(name))
            outDs.AddVariable<double>(name, Namings.dimNameCells);
    }
    foreach (string name in intVars)
    {
        if (!outDs.Variables.Contains(name))
            outDs.AddVariable<int>(name, Namings.dimNameCells);
    }

    // Append the source data to each request variable, preserving the original
    // append order (time fields first, then lon bounds, then lat bounds).
    string[] appendOrder =
    {
        Namings.VarNameDayMax, Namings.VarNameDayMin,
        Namings.VarNameHourMax, Namings.VarNameHourMin,
        Namings.VarNameYearMax, Namings.VarNameYearMin,
        Namings.VarNameLonMax, Namings.VarNameLonMin,
        Namings.VarNameLatMax, Namings.VarNameLatMin
    };
    foreach (string name in appendOrder)
        outDs.Variables[name].Append(inDs.Variables[name].GetData());

    if (commit)
    {
        outDs.Commit();
    }
}
/// <summary>
/// Merges the result datasets of several completed requests into a single table dataset.
/// </summary>
/// <param name="dst">Target dataset receiving the merged table.</param>
/// <param name="requests">Pairs of (request hash, variable names) identifying the source datasets.</param>
private void MergeCSV(DataSet dst, Tuple<string, string[]>[] requests)
{
    var jobManager = WebApiApplication.GetSharedJobManager(Request);

    // Open every referenced result dataset up front, keeping its variable list alongside.
    var sources = new Tuple<DataSet, string[]>[requests.Length];
    for (int i = 0; i < requests.Length; i++)
    {
        var uri = jobManager.ResultDataSetUri(requests[i].Item1, false);
        sources[i] = new Tuple<DataSet, string[]>(DataSet.Open(uri), requests[i].Item2);
    }

    try
    {
        var config = WebApiApplication.GetFetchConfiguration(DateTime.MaxValue);
        TableExportHelper.MergeTable(config, dst, sources);
        dst.Commit();
    }
    finally
    {
        // Always release the opened source datasets, even when the merge fails.
        foreach (var source in sources)
        {
            source.Item1.Dispose();
        }
    }
}
/// <summary>
/// Writes a vector into a two-dimensional SDS variable at the given offset along the
/// first dimension (spanning the whole second dimension), then commits.
/// </summary>
/// <param name="dataToConvert">The vector of values to write.</param>
/// <param name="outputVariableName">Name of the target variable (must already exist).</param>
/// <param name="dimensionNames">Names of the two dimension variables.</param>
/// <param name="dimension1Data">Values of the first dimension variable (not used by this overload; kept for interface symmetry).</param>
/// <param name="dimension2Data">Values of the second dimension (not used by this overload; kept for interface symmetry).</param>
/// <param name="missingValue">Missing-data marker (not used by this overload; kept for interface symmetry).</param>
/// <param name="SDSObject">The SDS object to write into.</param>
/// <param name="dimension1Offset">Offset along the first dimension at which to place the vector.</param>
public void VectorToSDS2D(double[] dataToConvert, string outputVariableName, string[] dimensionNames, float[] dimension1Data, float[] dimension2Data, double missingValue, DataSet SDSObject, int dimension1Offset)
{
    // Exactly two dimension names are expected for a 2D target variable.
    Debug.Assert(dimensionNames.Length == 2, "There should be two dimension names passed to this method");

    // Offset-based writes require the dimensions and the target variable to exist already.
    for (int d = 0; d < dimensionNames.Length; d++)
    {
        Debug.Assert(SDSObject.Variables.Contains(dimensionNames[d]), "Error: where an offset is included, dimension information must be defined before adding data");
    }
    Debug.Assert(SDSObject.Variables.Contains(outputVariableName), "Error: where an offset is included, the variable must be created before adding data");

    // Write the row at the requested offset across the full second dimension and persist.
    SDSObject.PutData<double[]>(outputVariableName, dataToConvert, DataSet.ReduceDim(dimension1Offset), DataSet.FromToEnd(0));
    SDSObject.Commit();
}
/// <summary>
/// Runs one modelling iteration: solves the next time layer, appends it to the dataset,
/// advances the solver state, and prints a divergence-based error estimate.
/// </summary>
/// <param name="i">Index of the current time step.</param>
public void PerformIteration(int i)
{
    // Solve the next time layer.
    LayerData result = solver.Solve(true);

    // Append the layer fields to the dataset along the time dimension.
    u.Append(result.U.ToArray(), "t");
    v.Append(result.V.ToArray(), "t");
    w.Append(result.W.ToArray(), "t");
    T.Append(result.T.ToArray(), "t");
    div.Append(result.Div.ToArray(), "t");
    time.Append(new double[1] { (double)i / modellingParams.Nt });
    dataSet.Commit();

    // Advance to the next layer: the solved result becomes the previous state.
    solver = new LayerSolver(prevData, result, modellingParams);
    prevData = result;

    // Accumulate divergence over the interior cells as an error measure.
    double temp = 0;
    int count = result.Width * result.Height * result.Thickness;
    for (int xi = 1; xi < result.Width; xi++)
    {
        for (int yi = 1; yi < result.Height; yi++)
        {
            for (int zi = 1; zi < result.Thickness; zi++)
            {
                temp += result.Div[xi, yi, zi];
            }
        }
    }
    temp = temp / count * modellingParams.Dx * modellingParams.Dy * modellingParams.Dz;

    // Report progress percentage and the error estimate.
    Console.WriteLine((double)i / modellingParams.Nt * 100 + "% Error = " + temp);
}
/// <summary>Advances the model by one step: regenerates the curves for the current phase and bumps it.</summary>
private void OneStep()
{
    double phase = dataset.GetData<double>("phase");

    // Sample sin/cos over one full period, shifted by the current phase.
    const int Size = 300;
    double[] x = new double[Size];
    double[] sin = new double[Size];
    double[] cos = new double[Size];
    for (int k = 0; k < Size; k++)
    {
        double arg = 2 * Math.PI * k / Size;
        x[k] = arg;
        sin[k] = Math.Sin(arg + phase);
        cos[k] = Math.Cos(arg + phase);
    }
    phase += 0.01;

    // Publish the new state in one commit.
    dataset.PutData("x", x);
    dataset.PutData("sin", sin);
    dataset.PutData("cos", cos);
    dataset.PutData("phase", phase);
    dataset.Commit();
}
/// <summary>Processes the request dataset entirely in-process, without the remote service.</summary>
/// <param name="ds">The request dataset.</param>
/// <returns>A new in-memory dataset holding the processed result; the caller takes ownership.</returns>
protected override DataSet LocalProcess(DataSet ds)
{
    DataSet resultDs = null;
    try
    {
        // Build a private in-memory copy of the request.
        resultDs = DataSet.Open("msds:memory");
        resultDs.IsAutocommitEnabled = false;
        FetchClimateRequestBuilder.CopyRequestedDataSet(ds, resultDs, false);
        if (resultDs.HasChanges)
        {
            resultDs.Commit();
        }

        // Run the climate processor synchronously against the copy.
        Microsoft.Research.Science.Data.Climate.Processing.ClimateRequestProcessor.Process(resultDs, 0);

        if (FetchClimateRequestBuilder.IsProcessingSuccessful(resultDs))
        {
            // Success: nothing extra to do (result caching was disabled in the original).
        }
        else if (!FetchClimateRequestBuilder.IsProcessingFailed(resultDs))
        {
            // Neither success nor failure was reported — the processor stopped early.
            throw new Exception("Processor hasn't finished the work.");
        }

        resultDs.IsAutocommitEnabled = true;
        return resultDs;
    }
    catch
    {
        // Dispose the partially built result before propagating the failure.
        if (resultDs != null && !resultDs.IsDisposed)
        {
            resultDs.Dispose();
        }
        throw;
    }
}
/// <summary>
/// Writes the full current model state (all cohort and stock properties for every
/// requested grid cell) to a new NetCDF state file for the given time step.
/// </summary>
/// <param name="currentModelGrid">The model grid to read state from</param>
/// <param name="functionalGroupHandler">The definitions of the cohort functional groups</param>
/// <param name="cellIndices">List of [lat index, lon index] pairs of the cells to output</param>
/// <param name="currentTimestep">The current time step (used in the output file name)</param>
/// <param name="maximumNumberOfCohorts">Lower bound for the size of the cohort dimension</param>
/// <param name="filename">Base name of the output file</param>
public void OutputCurrentModelState(ModelGrid currentModelGrid, FunctionalGroupDefinitions functionalGroupHandler, List<uint[]> cellIndices, uint currentTimestep, int maximumNumberOfCohorts, string filename)
{
    float[] Latitude = currentModelGrid.Lats;
    float[] Longitude = currentModelGrid.Lons;

    // One entry per cohort functional group, valued by its own index.
    float[] CohortFunctionalGroup = new float[functionalGroupHandler.GetNumberOfFunctionalGroups()];
    for (int fg = 0; fg < CohortFunctionalGroup.Length; fg++)
    {
        CohortFunctionalGroup[fg] = fg;
    }

    // Find the largest per-functional-group cohort count over all requested cells,
    // so the cohort dimension is large enough for every cell.
    int CellCohortNumber = 0;
    GridCellCohortHandler CellCohorts;
    for (int cellIndex = 0; cellIndex < cellIndices.Count; cellIndex++)
    {
        CellCohorts = currentModelGrid.GetGridCellCohorts(cellIndices[cellIndex][0], cellIndices[cellIndex][1]);
        for (int i = 0; i < CellCohorts.Count; i++)
        {
            if (CellCohorts[i].Count > CellCohortNumber)
            {
                CellCohortNumber = CellCohorts[i].Count;
            }
        }
    }
    int MaxNumberCohorts = Math.Max(CellCohortNumber, maximumNumberOfCohorts);

    // Cohort index axis, valued by its own index.
    float[] Cohort = new float[MaxNumberCohorts];
    for (int c = 0; c < Cohort.Length; c++)
    {
        Cohort[c] = c;
    }

    //Define an array for stock functional group - there are only three currently
    float[] StockFunctionalGroup = new float[] { 1, 2, 3 };
    //Define an array for index of stocks - there is only one currently
    float[] Stock = new float[] { 1 };

    // Output file is tagged with the time step and the simulation number.
    string Filename = filename + "_" + currentTimestep.ToString() + Simulation.ToString();
    StateOutput = SDSCreator.CreateSDS("netCDF", Filename, _OutputPath);

    //Define the cohort properties for output
    string[] CohortProperties = new string[] { "JuvenileMass", "AdultMass", "IndividualBodyMass", "IndividualReproductivePotentialMass", "CohortAbundance", "BirthTimeStep", "MaturityTimeStep", "LogOptimalPreyBodySizeRatio", "MaximumAchievedBodyMass", "Merged", "TrophicIndex", "ProportionTimeActive" };
    //define the dimensions for cohort outputs
    string[] dims = new string[] { "Latitude", "Longitude", "Cohort Functional Group", "Cohort" };

    // Add the variables for each cohort property
    // Then calculate the state for this property and put the data to this variable
    foreach (string v in CohortProperties)
    {
        DataConverter.AddVariable(StateOutput, "Cohort" + v, 4, dims, currentModelGrid.GlobalMissingValue, Latitude, Longitude, CohortFunctionalGroup, Cohort);
        StateOutput.PutData<double[,,,]>("Cohort" + v, CalculateCurrentCohortState(currentModelGrid, v, Latitude.Length, Longitude.Length, CohortFunctionalGroup.Length, Cohort.Length, cellIndices));
        StateOutput.Commit();
    }

    //Define the stock properties for output
    string[] StockProperties = new string[] { "IndividualBodyMass", "TotalBiomass" };
    //define the dimensions for stock outputs
    dims = new string[] { "Latitude", "Longitude", "Stock Functional Group", "Stock" };

    // Add the variables for each stock property
    // Then calculate the state for this property and put the data to this variable
    foreach (string v in StockProperties)
    {
        DataConverter.AddVariable(StateOutput, "Stock" + v, 4, dims, currentModelGrid.GlobalMissingValue, Latitude, Longitude, StockFunctionalGroup, Stock);
        StateOutput.PutData<double[,,,]>("Stock" + v, CalculateCurrentStockState(currentModelGrid, v, Latitude.Length, Longitude.Length, StockFunctionalGroup.Length, Stock.Length, cellIndices));
        StateOutput.Commit();
    }

    //Close this data set
    StateOutput.Dispose();
}
/// <summary>
/// Copies a DataSet to the destination URI in memory-bounded chunks: the structure and
/// scalar data are copied first, then per-dimension chunk sizes ("deltas") are shrunk
/// until one chunk of every variable fits into the memory budget, and finally the data
/// is copied chunk by chunk with progress reporting on the console.
/// </summary>
/// <param name="src">Source dataset to copy.</param>
/// <param name="dstUri">URI of the destination dataset (must be writable).</param>
/// <returns>The destination dataset (caller takes ownership).</returns>
/// <exception cref="ArgumentNullException">If <paramref name="src"/> is null.</exception>
/// <exception cref="NotSupportedException">If the destination is read-only, or the dataset cannot fit the memory budget even with chunks of length 1.</exception>
public static DataSet Clone(DataSet src, DataSetUri dstUri)
{
    if (src == null)
    {
        throw new ArgumentNullException("src");
    }
    // Maximum memory capacity in bytes
    int N = 200 * 1024 * 1024;
    // Estimated size of a single string in bytes
    int sizeofString = 100 * 1024;

    /***********************************************************************************
     * Preparing output
     ***********************************************************************************/
    DataSet dst = DataSet.Open(dstUri);
    if (dst.IsReadOnly)
    {
        throw new NotSupportedException("Output DataSet is read-only");
    }
    dst.IsAutocommitEnabled = false;
    DataSetSchema srcSchema = src.GetSchema();
    // Maps source variable IDs to the IDs of the corresponding destination variables.
    Dictionary<int, int> IDs = new Dictionary<int, int>();
    // Creating empty variables and copying global metadata and scalar variables
    Console.Out.Write("\n\nCreating structure and copying global metadata and scalar variables... ");
    foreach (VariableSchema v in srcSchema.Variables)
    {
        if (v.ID == DataSet.GlobalMetadataVariableID)
        {
            // Copying global metadata
            var dstGlobalMetadata = dst.Metadata;
            foreach (var attr in v.Metadata)
            {
                dstGlobalMetadata[attr.Key] = attr.Value;
            }
            continue;
        }
        Variable t = dst.AddVariable(v.TypeOfData, v.Name, null, v.Dimensions.AsNamesArray());
        IDs.Add(v.ID, t.ID);
        foreach (var attr in v.Metadata)
        {
            t.Metadata[attr.Key] = attr.Value;
        }
        if (t.Rank == 0) // scalar
        {
            t.PutData(src.Variables.GetByID(v.ID).GetData());
        }
    }
    dst.Commit();
    Console.Out.WriteLine("Done.\n");

    /***********************************************************************************
     * Adjusting dimensions deltas
     ***********************************************************************************/
    Dimension[] srcDims = srcSchema.GetDimensions();
    // Chunk length per dimension; starts at the full length and is halved as needed.
    Dictionary<string, int> deltas = new Dictionary<string, int>(srcDims.Length);
    foreach (var d in srcDims)
    {
        deltas[d.Name] = d.Length;
    }
    Console.Out.WriteLine("Total memory capacity: " + (N / 1024.0 / 1024.0).ToString("F2") + " Mb");
    int totalSize;
    do
    {
        // Estimate the memory needed for one chunk of every non-scalar variable.
        totalSize = 0;
        foreach (var var in srcSchema.Variables)
        {
            if (var.Rank == 0)
            {
                continue; // scalar
            }
            int typeSize = SizeOf(var.TypeOfData, sizeofString);
            int count = 0;
            foreach (var vdim in var.Dimensions)
            {
                int dimDelta = deltas[vdim.Name];
                if (count == 0)
                {
                    count = dimDelta;
                }
                else
                {
                    count *= dimDelta;
                }
            }
            totalSize += typeSize * count;
        }
        if (totalSize > N)
        {
            // Over budget: halve the largest delta and re-estimate.
            string maxDim = null;
            int max = int.MinValue;
            foreach (var dim in deltas)
            {
                if (dim.Value > max)
                {
                    max = dim.Value;
                    maxDim = dim.Key;
                }
            }
            if (maxDim == null || max <= 1)
            {
                throw new NotSupportedException("Cannot copy the DataSet: it is too large to be copied entirely by the utility for the provided memory capacity");
            }
            deltas[maxDim] = max >> 1;
        }
    } while (totalSize > N);
    // Printing deltas
    Console.Out.WriteLine("Deltas for the dimensions adjusted (max iteration capacity: " + (totalSize / 1024.0 / 1024.0).ToString("F2") + " Mb):");
    foreach (var delta in deltas)
    {
        Console.Out.WriteLine(" Dimension " + delta.Key + ": " + delta.Value);
    }

    /***********************************************************************************
     * Copying data
     ***********************************************************************************/
    Console.WriteLine();
    UpdateProgress(0);
    // Current chunk origin and shape per variable ID.
    Dictionary<int, int[]> origins = new Dictionary<int, int[]>(srcSchema.Variables.Length);
    Dictionary<int, int[]> shapes = new Dictionary<int, int[]>(srcSchema.Variables.Length);
    // Non-scalar variables still to be copied; finished ones are removed from the list.
    List<VariableSchema> copyVars = srcSchema.Variables.Where(vs => (vs.Rank > 0 && vs.ID != DataSet.GlobalMetadataVariableID)).ToList();
    Dictionary<string, int> dimOrigin = new Dictionary<string, int>(srcDims.Length);
    foreach (var d in srcDims)
    {
        dimOrigin[d.Name] = 0;
    }
    // Sort dimensions by length so the longest one (iterated last) drives the progress bar.
    Array.Sort(srcDims, (d1, d2) => d1.Length - d2.Length);
    int totalDims = srcDims.Length;
    do
    {
        // for each variable:
        for (int varIndex = copyVars.Count; --varIndex >= 0;)
        {
            VariableSchema var = copyVars[varIndex];
            bool hasChanged = false;
            // Getting its origin
            int[] origin;
            if (!origins.TryGetValue(var.ID, out origin))
            {
                origin = new int[var.Rank];
                origins[var.ID] = origin;
                hasChanged = true;
            }
            // Getting its shape
            int[] shape;
            if (!shapes.TryGetValue(var.ID, out shape))
            {
                shape = new int[var.Rank];
                for (int i = 0; i < var.Dimensions.Count; i++)
                {
                    shape[i] = deltas[var.Dimensions[i].Name];
                }
                shapes.Add(var.ID, shape);
            }
            // Updating origin for the variable:
            if (!hasChanged)
            {
                for (int i = 0; i < shape.Length; i++)
                {
                    int o = dimOrigin[var.Dimensions[i].Name];
                    if (origin[i] != o)
                    {
                        hasChanged = true;
                        origin[i] = o;
                    }
                }
            }
            if (!hasChanged) // this block is already copied
            {
                continue;
            }
            // Clip the chunk shape to the variable bounds; doCopy stays false only
            // when the chunk is entirely outside the variable.
            bool doCopy = false;
            bool shapeUpdated = false;
            for (int i = 0; i < shape.Length; i++)
            {
                int s = origin[i] + shape[i];
                int len = var.Dimensions[i].Length;
                if (s > len)
                {
                    if (!shapeUpdated)
                    {
                        shapeUpdated = true;
                        // Clone before clipping so the cached full-size shape stays intact.
                        shape = (int[])shape.Clone();
                    }
                    shape[i] = len - origin[i];
                }
                if (shape[i] > 0)
                {
                    doCopy = true;
                }
            }
            if (doCopy)
            {
                Array data = src.Variables.GetByID(var.ID).GetData(origin, shape);
                // Compute real size here for strings
                dst.Variables.GetByID(IDs[var.ID]).PutData(origin, data);
            }
            else // variable is copied
            {
                copyVars.RemoveAt(varIndex);
            }
        }
        dst.Commit();
        // Updating dimensions origin (odometer-style increment over all dimensions)
        bool isOver = true;
        for (int i = 0; i < totalDims; i++)
        {
            Dimension dim = srcDims[i];
            int origin = dimOrigin[dim.Name] + deltas[dim.Name];
            if (origin < dim.Length)
            {
                dimOrigin[dim.Name] = origin;
                isOver = false;
                // Progress indicator
                if (i == totalDims - 1)
                {
                    double perc = (double)origin / dim.Length * 100.0;
                    UpdateProgress(perc);
                }
                break;
            }
            dimOrigin[dim.Name] = 0;
        }
        if (isOver)
        {
            break;
        }
    } while (copyVars.Count > 0);
    UpdateProgress(100.0);
    Console.Out.WriteLine();
    return (dst);
}
/// <summary>
/// Merges per-request result data sets (identified by their hashes) into a single
/// CSV-oriented data set: coordinate axes are copied once from the first source,
/// and each environment variable's values/uncertainty/provenance are added with
/// the environment variable name as a prefix.
/// </summary>
/// <param name="dst">The destination data set receiving the merged variables.</param>
/// <param name="hashes">Hashes identifying the cached result data sets to merge.</param>
private void MergeCSV(DataSet dst, string[] hashes)
{
    string[] coordinateNames = { "lat", "latmax", "latmin", "lon", "lonmax", "lonmin" };
    for (var i = 0; i < hashes.Length; i++)
    {
        using (var src = DataSet.Open(WebApiApplication.GetSharedJobManager(Request).ResultDataSetUri(hashes[i], false)))
        {
            var name = src.Metadata[RequestDataSetFormat.EnvironmentVariableNameKey].ToString();
            if (src.Metadata.ContainsKey(RequestDataSetFormat.DataSourceNameKey))
            {
                var dataSources = (string[])src.Metadata[RequestDataSetFormat.DataSourceNameKey];
                var config = WebApiApplication.GetFetchConfiguration(DateTime.MaxValue);
                dst.Metadata[name + "_dataSourceNames"] = dataSources;
                dst.Metadata[name + "_dataSourceIDs"] = dataSources.Select(ds => config.DataSources.Where(dsd => dsd.Name == ds).First().ID).ToArray();
            }

            // One-dimensional values variable => point series; otherwise gridded data.
            var visMethod = src.Variables[RequestDataSetFormat.ValuesVariableName].Dimensions.Count() == 1 ? "Points" : "Colormap";

            if (i == 0)
            {
                // Coordinate axes are shared across sources: copy them from the first one only.
                foreach (var coordinate in coordinateNames)
                {
                    if (src.Variables.Contains(coordinate))
                    {
                        dst.AddVariableByValue(src.Variables[coordinate]);
                    }
                }
                dst.AddVariableByValue(src.Variables["hours"]);
                dst.AddVariableByValue(src.Variables["days"]);
                dst.AddVariableByValue(src.Variables["years"]);
                dst.Metadata["VisualHints"] = name + "_" + RequestDataSetFormat.ValuesVariableName + "Style: " + visMethod;
            }

            var valuesVariable = src[RequestDataSetFormat.ValuesVariableName];
            dst.AddVariable<double>(
                name + "_" + RequestDataSetFormat.ValuesVariableName,
                valuesVariable.GetData(),
                valuesVariable.Dimensions.Select(d => d.Name).ToArray()).Metadata["VisualHints"] = "Style: " + visMethod;

            var uncertaintyVariable = src[RequestDataSetFormat.UncertaintyVariableName];
            dst.AddVariable<double>(
                name + "_" + RequestDataSetFormat.UncertaintyVariableName,
                uncertaintyVariable.GetData(),
                uncertaintyVariable.Dimensions.Select(d => d.Name).ToArray());

            if (src.Variables.Contains(RequestDataSetFormat.ProvenanceVariableName))
            {
                var provenanceVariable = src[RequestDataSetFormat.ProvenanceVariableName];
                dst.AddVariable<ushort>(
                    name + "_" + RequestDataSetFormat.ProvenanceVariableName,
                    provenanceVariable.GetData(),
                    provenanceVariable.Dimensions.Select(d => d.Name).ToArray());
            }
        }
    }
    dst.Commit();
}
/// <summary>
/// Reads a JSON file containing a list of records (each a map of column name to
/// string value), converts the columns into NetCDF variables (a "date" column
/// becomes a DateTime variable, every other column a float variable) and writes
/// the result to "result.nc" in a freshly created, uniquely named directory.
/// </summary>
/// <param name="path">Path of the JSON input file.</param>
/// <param name="outfolder">If true the output directory is created under the
/// "Save_Downloads" configured folder, otherwise under the "Converters" folder.</param>
/// <returns>The path of the directory containing result.nc, or "Error" on failure.</returns>
public string Get(string path, bool outfolder)
{
    logger.Log(LogLevel.Info, "Entered AVISTEDNetCDFConverter GET()");
    try
    {
        string content = File.ReadAllText(path);
        List<Dictionary<string, string>> data = JsonConvert.DeserializeObject<List<Dictionary<string, string>>>(content);

        // Capture the timestamp once: the original called DateTime.Now for each
        // component, which could straddle a second/minute boundary and yield an
        // inconsistent (or colliding) directory name.
        DateTime now = DateTime.Now;
        string timeRelatedFolderNamePart = now.Year.ToString() + now.Month.ToString() + now.Day.ToString()
            + now.Hour.ToString() + now.Minute.ToString() + now.Second.ToString() + now.Millisecond.ToString();
        string processRelatedFolderNamePart = System.Diagnostics.Process.GetCurrentProcess().Id.ToString();
        string randomlyGeneratedFolderNamePart = Path.GetFileNameWithoutExtension(Path.GetRandomFileName());

        string copypath = outfolder
            ? ConfigurationManager.AppSettings["Save_Downloads"].ToString()
            : ConfigurationManager.AppSettings["Converters"].ToString();

        string temporaryDirectoryName = Path.Combine(copypath,
            timeRelatedFolderNamePart + processRelatedFolderNamePart + randomlyGeneratedFolderNamePart);
        System.IO.Directory.CreateDirectory(temporaryDirectoryName);
        logger.Log(LogLevel.Info, "Created Directory");

        // Column names come from the first record; every record is assumed to
        // carry the same keys in the same order -- TODO confirm with the producer.
        Dictionary<string, string> firstRecord = data.First();
        string[] names = firstRecord.Keys.ToArray();

        // Concatenate each column's values across all records into one comma-separated string.
        string[] values = new string[names.Length];
        bool firstRow = true;
        foreach (Dictionary<string, string> record in data)
        {
            var rowValues = record.Values.ToArray();
            for (int k = 0; k < values.Length; k++)
            {
                values[k] = firstRow ? rowValues[k] : values[k] + "," + rowValues[k];
            }
            firstRow = false;
        }

        string uri = Path.Combine(temporaryDirectoryName, "result" + ".nc") + "?openMode=create";
        // using guarantees the data set is released even when a parse below throws
        // (the original leaked it on the exception path).
        using (DataSet dscopy = DataSet.Open(uri))
        {
            int index = 0;
            foreach (string s in names)
            {
                string[] strings = values[index++].Split(',');
                if (s.Equals("date"))
                {
                    DateTime[] date = new DateTime[strings.Length];
                    int l = 0;
                    foreach (string d in strings)
                    {
                        // NOTE(review): parses with the current culture -- confirm the input format.
                        date[l++] = DateTime.Parse(d);
                    }
                    dscopy.AddVariable<DateTime>(s, date);
                }
                else
                {
                    float[] vl = new float[strings.Length];
                    int l = 0;
                    foreach (string d in strings)
                    {
                        // NOTE(review): parses with the current culture -- confirm the input format.
                        vl[l++] = float.Parse(d);
                    }
                    dscopy.AddVariable<float>(s, vl);
                }
                logger.Log(LogLevel.Info, "Created parameter {0}", s);
            }
            dscopy.Commit();
        }
        return temporaryDirectoryName;
    }
    catch (Exception ex)
    {
        logger.Error("AVISTEDNetCDFConverter:Failed with exception {0}", ex.Message);
    }
    return "Error";
}
static void DoSlice(string uri, Dictionary <string, Range> ranges, string outputUri) { DataSetUri dstUri = DataSetUri.Create(outputUri); if (dstUri.ProviderName.StartsWith("memory")) { throw new NotSupportedException("Copying to memory is not supported by the utility."); } using (DataSet src = DataSet.Open(uri)) { using (DataSet mds = DataSet.Open("msds:memory")) using (DataSet mds2 = DataSet.Open("msds:memory")) { mds.IsAutocommitEnabled = false; mds2.IsAutocommitEnabled = false; foreach (var var in src.Variables) { bool doSlice = false; foreach (var dim in var.Dimensions) { if (var.Dimensions.Contains(dim.Name)) { doSlice = true; break; } } if (doSlice) { var refVar = mds.AddVariableByReference(var); int rank = refVar.Rank; int[] origin = new int[rank]; int[] stride = new int[rank]; int[] count = new int[rank]; for (int i = 0; i < rank; i++) { Range r; if (ranges.TryGetValue(var.Dimensions[i].Name, out r)) { if (r.IsReduced) { origin[i] = r.Origin; stride[i] = 0; count[i] = 1; } else { throw new NotSupportedException("sds slice supports only dimension fixing"); } } else { origin[i] = 0; stride[i] = 1; count[i] = 0; } } Variable strVar = StrideVariable(refVar, origin, stride, count); mds2.AddVariableByReference(strVar); } else { mds2.AddVariableByReference(var); } } // mds2 is ready to be committed try { mds2.Commit(); } catch (DistributedCommitFailedException dex) { if (dex.InnerException is ConstraintsFailedException) { string error = String.Format("Input DataSets are incompatible: {0}", ((ConstraintsFailedException)(dex.InnerException)).Message); WriteError(error); return; } throw; } catch (ConstraintsFailedException cex) { string error = String.Format("Input DataSets are incompatible: {0}", cex.Message); WriteError(error); return; } // mds2 is ready to be cloned Microsoft.Research.Science.Data.Utilities.DataSetCloning.Clone(mds2, dstUri, Microsoft.Research.Science.Data.Utilities.DataSetCloning.DefaultUpdater).Dispose(); } } }
/// <summary>
/// Extract an array of values from a state variable in a model grid and add to a two-dimensional variable in an SDS object
/// </summary>
/// <param name="ecosystemModelGrid">The model grid to extract data from</param>
/// <param name="cellIndices">List of indices of active cells in the model grid</param>
/// <param name="gridVariableName">The name of the state variable in the model grid</param>
/// <param name="traitValue">The trait value of the functional groups to get data for</param>
/// <param name="variableType">The type of the state variable: 'stock' or 'cohort'</param>
/// <param name="outputVariableName">The name of the variable to write to</param>
/// <param name="SDSObject">The SDS object to write to</param>
/// <param name="functionalGroupHandler">The functional group handler corresponding to cohorts or stocks</param>
/// <param name="initialisation">The Madingley Model initialisation</param>
public void Array2DToSDS2D(ModelGrid ecosystemModelGrid, List<uint[]> cellIndices, string gridVariableName, string traitValue, string variableType, string outputVariableName, DataSet SDSObject, FunctionalGroupDefinitions functionalGroupHandler, MadingleyModelInitialisation initialisation)
{
    // Get the missing value from the model grid
    double MissingValue = ecosystemModelGrid.GlobalMissingValue;

    // Generate arrays of cell-centre latitudes and longitudes: the model grid
    // stores bottom-left cell references, but SDS requires cell centres.
    float[] lats = new float[ecosystemModelGrid.NumLatCells];
    float[] lons = new float[ecosystemModelGrid.NumLonCells];
    for (uint ii = 0; ii < ecosystemModelGrid.NumLatCells; ii++)
    {
        lats[ii] = ecosystemModelGrid.Lats[ii] + (ecosystemModelGrid.LatCellSize / 2);
    }
    for (uint jj = 0; jj < ecosystemModelGrid.NumLonCells; jj++)
    {
        lons[jj] = ecosystemModelGrid.Lons[jj] + (ecosystemModelGrid.LonCellSize / 2);
    }

    // Get the required data from the model grid. (The previous code pre-allocated
    // a NumLatCells x NumLonCells array here that was immediately overwritten by
    // this call -- a dead allocation, now removed.)
    double[,] dataToConvert = ecosystemModelGrid.GetStateVariableGrid(gridVariableName, traitValue, functionalGroupHandler.AllFunctionalGroupsIndex, cellIndices, variableType, initialisation);

    // If not already contained in the SDS, add the dimension information (geographic coordinates)
    if (!SDSObject.Variables.Contains("Latitude"))
    {
        SDSObject.AddVariable<float>("Latitude", lats, "Lat");
    }
    if (!SDSObject.Variables.Contains("Longitude"))
    {
        SDSObject.AddVariable<float>("Longitude", lons, "Lon");
    }

    // If the SDS object already contains the output variable, then add the data.
    // Otherwise, define the variable (with metadata) and then add the data.
    if (SDSObject.Variables.Contains(outputVariableName))
    {
        SDSObject.PutData<double[,]>(outputVariableName, dataToConvert);
        // Commit the changes
        SDSObject.Commit();
    }
    else
    {
        // Set up the dimensions and add the gridded data
        string[] dimensions = { "Lat", "Lon" };
        var DataGrid = SDSObject.AddVariable<double>(outputVariableName, dataToConvert, dimensions);

        // Add appropriate metadata (including missing values)
        DataGrid.Metadata["DisplayName"] = outputVariableName;
        DataGrid.Metadata["MissingValue"] = (double)MissingValue;

        // Commit changes to update data set
        SDSObject.Commit();
    }
}
static void DoMerge(string[] URIs, string output) { DataSetUri dstUri = DataSetUri.Create(output); if (dstUri.ProviderName.StartsWith("memory")) { throw new NotSupportedException("Copying to memory is not supported by the utility."); } DataSet[] dss = new DataSet[URIs.Length]; int i = 0; try { for (i = 0; i < dss.Length; i++) { dss[i] = DataSet.Open(URIs[i]); } } catch { for (; --i >= 0;) { dss[i].Dispose(); } throw; } using (DataSet mds = DataSet.Open("msds:memory")) { mds.IsAutocommitEnabled = false; IMetadataConflictResolver conflictResolver = new WarningConflictResolver(); foreach (var ds in dss) { // Global metadata foreach (var attr in ds.Metadata) { object val; if (mds.Metadata.ContainsKey(attr.Key, SchemaVersion.Recent)) { val = conflictResolver.Resolve(attr.Key, mds.Metadata[attr.Key, SchemaVersion.Recent], attr.Value); } else { val = attr.Value; } mds.Metadata[attr.Key] = val; } // Variables foreach (var var in ds.Variables) { mds.AddVariableByReference(var); } } try { mds.Commit(); } catch (DistributedCommitFailedException dex) { if (dex.InnerException is ConstraintsFailedException) { string error = String.Format("Input DataSets are incompatible: {0}", ((ConstraintsFailedException)(dex.InnerException)).Message); WriteError(error); return; } throw; } catch (ConstraintsFailedException cex) { string error = String.Format("Input DataSets are incompatible: {0}", cex.Message); WriteError(error); return; } Microsoft.Research.Science.Data.Utilities.DataSetCloning.Clone(mds, dstUri, Microsoft.Research.Science.Data.Utilities.DataSetCloning.DefaultUpdater).Dispose(); } }
/// <summary>
/// Runs the full simulation: initialises the output DataSet with coordinate axes
/// and 4-D field variables, writes the initial layer, then advances the solver
/// over all time steps, appending each computed layer along the "t" dimension
/// and printing the mean divergence as a progress/error measure.
/// </summary>
public void SolveAll(string ctorString)
{
    dataSet = ProxyDataSet.Open("msds:memory");

    // Declare the coordinate axes and the 4-D field variables.
    Variable axisX = dataSet.AddVariable<double>("X", "x");
    Variable axisY = dataSet.AddVariable<double>("Y", "y");
    Variable axisZ = dataSet.AddVariable<double>("Z", "z");
    Variable timeVar = dataSet.AddVariable<double>("Time", "t");
    Variable velocityU = dataSet.AddVariable<double>("U velocity", "x", "y", "z", "t");
    Variable velocityV = dataSet.AddVariable<double>("V velocity", "x", "y", "z", "t");
    Variable velocityW = dataSet.AddVariable<double>("W velocity", "x", "y", "z", "t");
    Variable temperature = dataSet.AddVariable<double>("Temperature", "x", "y", "z", "t");
    Variable divergence = dataSet.AddVariable<double>("Divergence", "x", "y", "z", "t");
    dataSet.Commit();

    // Fill the spatial axes with uniformly spaced coordinates.
    double[] axisValues = new double[modellingParams.Nx];
    for (int i = 0; i < modellingParams.Nx; i++)
    {
        axisValues[i] = i * modellingParams.Dx;
    }
    axisX.PutData(axisValues);

    axisValues = new double[modellingParams.Ny];
    for (int i = 0; i < modellingParams.Ny; i++)
    {
        axisValues[i] = i * modellingParams.Dy;
    }
    axisY.PutData(axisValues);

    axisValues = new double[modellingParams.Nz];
    for (int i = 0; i < modellingParams.Nz; i++)
    {
        axisValues[i] = i * modellingParams.Dz;
    }
    axisZ.PutData(axisValues);

    // Initialise the layer solver with the initial conditions and store layer 0.
    LayerSolver solver = new LayerSolver(prevData, modellingParams);
    velocityU.Append(prevData.U.ToArray(), "t");
    velocityV.Append(prevData.V.ToArray(), "t");
    velocityW.Append(prevData.W.ToArray(), "t");
    temperature.Append(prevData.T.ToArray(), "t");
    divergence.Append(prevData.Div.ToArray(), "t");
    timeVar.PutData(new double[1] { 0 });
    dataSet.Commit();

    // Main time-stepping loop.
    for (int i = 1; i < modellingParams.Nt; i++)
    {
        LayerData result = solver.Solve(true);

        // Store the newly computed layer in the DataSet.
        velocityU.Append(result.U.ToArray(), "t");
        velocityV.Append(result.V.ToArray(), "t");
        velocityW.Append(result.W.ToArray(), "t");
        temperature.Append(result.T.ToArray(), "t");
        divergence.Append(result.Div.ToArray(), "t");
        timeVar.Append(new double[1] { (double)i / modellingParams.Nt });
        dataSet.Commit();

        // Advance to the next layer.
        solver = new LayerSolver(prevData, result, modellingParams);
        prevData = result;

        // Report progress together with the volume-scaled mean divergence.
        double divergenceSum = 0;
        int cellCount = 0;
        for (int ii = 1; ii < result.Width; ii++)
        {
            for (int jj = 1; jj < result.Height; jj++)
            {
                for (int kk = 1; kk < result.Thickness; kk++)
                {
                    divergenceSum += result.Div[ii, jj, kk];
                    cellCount++;
                }
            }
        }
        divergenceSum = divergenceSum / cellCount * modellingParams.Dx * modellingParams.Dy * modellingParams.Dz;
        Console.WriteLine((double)i / modellingParams.Nt * 100 + "% Error = " + divergenceSum);
    }
    dataSet.Commit();
}
/// <summary>
/// Merges a set of request result data sets into one flat table in <paramref name="dst"/>:
/// each distinct space-time cell becomes one row (dimension "i"), and each environment
/// variable contributes a values/uncertainty/provenance column triple. Optional
/// bounding-box columns (latmin/latmax/lonmin/lonmax) are created lazily, only when
/// some cell's bounds differ from its point coordinate. Rows a variable never wrote
/// are back-filled with NaN / null / "" at the end.
/// </summary>
/// <param name="config">Fetch configuration providing data source and environment variable definitions.</param>
/// <param name="dst">Destination data set receiving the merged table.</param>
/// <param name="requests">Pairs of (result data set, requested data source names).</param>
public static void MergeTable(IFetchConfiguration config, DataSet dst, Tuple<DataSet, string[]>[] requests)
{
    // For faster lookup of data source name from id
    var id2name = new Dictionary<int, string>();
    foreach (var dsd in config.DataSources)
    {
        id2name.Add(dsd.ID, dsd.Name);
    }
    var var2var = new Dictionary<string, Variables>();  // Environment variable short name => data set variables
    var cell2row = new Dictionary<Cell, int>();         // Space-time cell => row number
    var rowCount = 0;           // total number of rows allocated so far
    var optionalRowCount = 0;   // highest row written to the optional min/max columns
    var regionRowCount = 0;     // highest row written to the region column
    var regions = dst.AddVariable<string>("region", "i");
    var lat = dst.AddVariable<double>("lat", "i");
    var lon = dst.AddVariable<double>("lon", "i");
    // Bounding-box columns are created lazily on first use (see below).
    Variable<double> latmin = null, latmax = null, lonmin = null, lonmax = null;
    var start = dst.AddVariable<DateTime>("start", "i");
    var end = dst.AddVariable<DateTime>("end", "i");
    for (var i = 0; i < requests.Length; i++)
    {
        using (var src = requests[i].Item1.Clone("msds:memory"))
        {
            var name = src.Metadata[RequestDataSetFormat.EnvironmentVariableNameKey].ToString();
            // Define data source name to use when not provenance is supplied
            var noProvDataSource = "";
            if (src.Metadata.ContainsKey(RequestDataSetFormat.DataSourceNameKey))
            {
                var requestedDataSources = (string[])src.Metadata[RequestDataSetFormat.DataSourceNameKey];
                if (requestedDataSources.Length == 1)
                {
                    noProvDataSource = requestedDataSources[0];
                }
            }
            var envVar = config.EnvironmentalVariables.Where(ev => ev.Name == name).First();
            Variables variables;
            if (!var2var.TryGetValue(name, out variables))
            {
                // First time this environment variable is seen: create its column triple.
                variables = new Variables();
                variables.Values = dst.AddVariable<double>(String.Concat(name, " (", envVar.Units, ")"), "i");
                variables.Values.MissingValue = Double.NaN;
                variables.Uncertainty = dst.AddVariable<double>(String.Concat(name, "_uncertainty"), "i");
                variables.Uncertainty.MissingValue = Double.NaN;
                variables.Provenance = dst.AddVariable<string>(String.Concat(name, "_provenance"), "i");
                variables.Provenance.MissingValue = null;
                var2var.Add(name, variables);
            }
            // Linearize yields tuples of (cell, value, uncertainty, provenance id, region name).
            foreach (var t in Linearize(src, requests[i].Item2))
            {
                int row;
                if (!cell2row.TryGetValue(t.Item1, out row))
                {
                    // New space-time cell: allocate the next row.
                    row = rowCount++;
                    cell2row.Add(t.Item1, row);
                }
                if (t.Item5 != "")
                {
                    regions[row] = t.Item5;
                    regionRowCount = Math.Max(regionRowCount, row);
                }
                lat[row] = t.Item1.Lat;
                lon[row] = t.Item1.Lon;
                start[row] = t.Item1.Start;
                end[row] = t.Item1.End;
                if (t.Item1.LatMin != t.Item1.Lat)
                {
                    // Cell has a real bounding box: create the optional columns on first use.
                    if (latmin == null)
                    {
                        latmin = dst.AddVariable<double>("latmin", "i");
                        latmax = dst.AddVariable<double>("latmax", "i");
                        lonmin = dst.AddVariable<double>("lonmin", "i");
                        lonmax = dst.AddVariable<double>("lonmax", "i");
                    }
                    latmin[row] = t.Item1.LatMin;
                    latmax[row] = t.Item1.LatMax;
                    lonmin[row] = t.Item1.LonMin;
                    lonmax[row] = t.Item1.LonMax;
                    optionalRowCount = Math.Max(optionalRowCount, row);
                }
                variables.Values[row] = t.Item2;
                // Double.MaxValue marks "no uncertainty available" -> stored as NaN.
                variables.Uncertainty[row] = t.Item3 < Double.MaxValue ? t.Item3 : Double.NaN;
                // 65535 is treated here as the "no provenance" sentinel -> fall back
                // to the single requested data source name (empty if ambiguous).
                variables.Provenance[row] = (t.Item4 == 65535) ? noProvDataSource : id2name[t.Item4];
                variables.RowCount = row;
            }
        }
    }
    // Back-fill rows each variable never wrote so all columns have rowCount entries.
    foreach (var v in var2var)
    {
        for (var j = v.Value.RowCount + 1; j < rowCount; j++)
        {
            v.Value.Values[j] = Double.NaN;
            v.Value.Uncertainty[j] = Double.NaN;
            v.Value.Provenance[j] = null;
        }
    }
    if (latmin != null)
    {
        for (var j = optionalRowCount + 1; j < rowCount; j++)
        {
            latmin[j] = Double.NaN;
            latmax[j] = Double.NaN;
            lonmin[j] = Double.NaN;
            lonmax[j] = Double.NaN;
        }
    }
    for (var j = regionRowCount + 1; j < rowCount; j++)
    {
        regions[j] = "";
    }
    dst.Commit();
}
public static void FillDataSetWithStatusCheckParams(DataSet ds, int expectedCalculationTime, string hash,bool resendRequest, bool commit) { ds.Metadata[Namings.restApiMetadataNameExpectedCalculationTime] = expectedCalculationTime; ds.Metadata[Namings.restApiMetadataNameHash] = hash; ds.Metadata[Namings.restApiMetadataNameResendRequest] = resendRequest; if (commit) ds.Commit(); }
/// <summary>
/// Copies a request description from <paramref name="inDs"/> into
/// <paramref name="outDs"/>: all global metadata plus the per-cell request
/// boundary variables, creating any missing variables on the cells dimension
/// first. (Refactored from ten copy-pasted ensure/append blocks into
/// data-driven loops; creation and append order is preserved exactly.)
/// </summary>
public static void CopyRequestedDataSet(DataSet inDs, DataSet outDs, bool commit)
{
    foreach (var entry in inDs.Metadata)
    {
        outDs.Metadata[entry.Key] = entry.Value;
    }

    // Request-boundary variables, grouped by element type, in original creation order.
    string[] doubleVars = { Namings.VarNameLatMax, Namings.VarNameLatMin, Namings.VarNameLonMax, Namings.VarNameLonMin };
    string[] intVars = { Namings.VarNameDayMax, Namings.VarNameDayMin, Namings.VarNameHourMax, Namings.VarNameHourMin, Namings.VarNameYearMax, Namings.VarNameYearMin };

    foreach (string name in doubleVars)
    {
        if (!outDs.Variables.Contains(name))
        {
            outDs.AddVariable<double>(name, Namings.dimNameCells);
        }
    }
    foreach (string name in intVars)
    {
        if (!outDs.Variables.Contains(name))
        {
            outDs.AddVariable<int>(name, Namings.dimNameCells);
        }
    }

    // Copy the data for every request variable, in the original append order.
    string[] appendOrder =
    {
        Namings.VarNameDayMax, Namings.VarNameDayMin,
        Namings.VarNameHourMax, Namings.VarNameHourMin,
        Namings.VarNameYearMax, Namings.VarNameYearMin,
        Namings.VarNameLonMax, Namings.VarNameLonMin,
        Namings.VarNameLatMax, Namings.VarNameLatMin
    };
    foreach (string name in appendOrder)
    {
        outDs.Variables[name].Append(inDs.Variables[name].GetData());
    }

    if (commit)
    {
        outDs.Commit();
    }
}
/// <summary>
/// Fills the data set with a FetchClimate request: parameter, coverage and
/// provenance-hint metadata plus the per-cell spatial and temporal request
/// boundaries on the cells dimension. Disables autocommit and commits once at
/// the end. (Refactored from ten copy-pasted ensure-variable blocks into
/// data-driven loops; creation and append order is preserved exactly.)
/// </summary>
public static void FillDataSetWithRequest(DataSet ds, ClimateParameter parameter, double[] latmin, double[] latmax, double[] lonmin, double[] lonmax, int[] starthour, int[] stophour, int[] startday, int[] stopday, int[] startyear, int[] stopyear, FetchingOptions options)
{
    ds.IsAutocommitEnabled = false;
    ds.Metadata[Namings.metadataNameParameter] = Namings.GetParameterName(parameter);
    ds.Metadata[Namings.metadataNameCoverage] = Namings.GetCoverageName(parameter);
    ds.Metadata[Namings.metadataNameProvenanceHint] = options.FetchClimateProvenanceControlStr;

    // Create any missing request variables on the cells dimension, in original order.
    string[] doubleVars = { Namings.VarNameLatMax, Namings.VarNameLatMin, Namings.VarNameLonMax, Namings.VarNameLonMin };
    foreach (string name in doubleVars)
    {
        if (!ds.Variables.Contains(name))
        {
            ds.AddVariable<double>(name, Namings.dimNameCells);
        }
    }
    string[] intVars = { Namings.VarNameDayMax, Namings.VarNameDayMin, Namings.VarNameHourMax, Namings.VarNameHourMin, Namings.VarNameYearMax, Namings.VarNameYearMin };
    foreach (string name in intVars)
    {
        if (!ds.Variables.Contains(name))
        {
            ds.AddVariable<int>(name, Namings.dimNameCells);
        }
    }

    // Append the request boundaries (stop* => max, start* => min).
    ds.Variables[Namings.VarNameDayMax].Append(stopday);
    ds.Variables[Namings.VarNameDayMin].Append(startday);
    ds.Variables[Namings.VarNameHourMax].Append(stophour);
    ds.Variables[Namings.VarNameHourMin].Append(starthour);
    ds.Variables[Namings.VarNameYearMax].Append(stopyear);
    ds.Variables[Namings.VarNameYearMin].Append(startyear);
    ds.Variables[Namings.VarNameLonMax].Append(lonmax);
    ds.Variables[Namings.VarNameLonMin].Append(lonmin);
    ds.Variables[Namings.VarNameLatMax].Append(latmax);
    ds.Variables[Namings.VarNameLatMin].Append(latmin);
    ds.Commit();
}
/// <summary>
/// Fills the data set with a FetchClimate request: parameter, coverage and
/// provenance-hint metadata plus the per-cell spatial and temporal request
/// boundaries on the cells dimension. Disables autocommit and commits once at the end.
/// </summary>
public static void FillDataSetWithRequest(DataSet ds, ClimateParameter parameter, double[] latmin, double[] latmax, double[] lonmin, double[] lonmax, int[] starthour, int[] stophour, int[] startday, int[] stopday, int[] startyear, int[] stopyear, FetchingOptions options)
{
    ds.IsAutocommitEnabled = false;
    ds.Metadata[Namings.metadataNameParameter] = Namings.GetParameterName(parameter);
    ds.Metadata[Namings.metadataNameCoverage] = Namings.GetCoverageName(parameter);
    ds.Metadata[Namings.metadataNameProvenanceHint] = options.FetchClimateProvenanceControlStr;

    // Ensure every request variable exists on the cells dimension
    // (same creation order as before: lat/lon doubles, then day/hour/year ints).
    string[] cellDoubleVariables = { Namings.VarNameLatMax, Namings.VarNameLatMin, Namings.VarNameLonMax, Namings.VarNameLonMin };
    foreach (string variableName in cellDoubleVariables)
    {
        if (!ds.Variables.Contains(variableName))
        {
            ds.AddVariable<double>(variableName, Namings.dimNameCells);
        }
    }
    string[] cellIntVariables = { Namings.VarNameDayMax, Namings.VarNameDayMin, Namings.VarNameHourMax, Namings.VarNameHourMin, Namings.VarNameYearMax, Namings.VarNameYearMin };
    foreach (string variableName in cellIntVariables)
    {
        if (!ds.Variables.Contains(variableName))
        {
            ds.AddVariable<int>(variableName, Namings.dimNameCells);
        }
    }

    // Append the request boundaries (stop* => max, start* => min).
    ds.Variables[Namings.VarNameDayMax].Append(stopday);
    ds.Variables[Namings.VarNameDayMin].Append(startday);
    ds.Variables[Namings.VarNameHourMax].Append(stophour);
    ds.Variables[Namings.VarNameHourMin].Append(starthour);
    ds.Variables[Namings.VarNameYearMax].Append(stopyear);
    ds.Variables[Namings.VarNameYearMin].Append(startyear);
    ds.Variables[Namings.VarNameLonMax].Append(lonmax);
    ds.Variables[Namings.VarNameLonMin].Append(lonmin);
    ds.Variables[Namings.VarNameLatMax].Append(latmax);
    ds.Variables[Namings.VarNameLatMin].Append(latmin);
    ds.Commit();
}
/// <summary>
/// Parses the command-line arguments of "sds update" and writes one metadata
/// entry (scalar or array) into either a variable's metadata or the data set's
/// global metadata, then commits. Argument shapes:
///   [varName | /g] [/t:Type] key value [value ...]
/// </summary>
/// <param name="ds">The data set to update.</param>
/// <param name="args">Positional arguments, starting at the target selector.</param>
private static void UpdateMetadata(DataSet ds, string[] args)
{
    // sds update air.nc /g val "-50.0"
    // sds update air.nc /g /t:Double "min" "-50.0"
    // sds update air.nc air /t:Double "min" "-50.0"
    // sds update air.nc air "min" "-50.0"
    // sds update air.nc air /t:Double "min" "-50.0" "-50.2"
    Variable v = null;
    int pi = 0;
    if (args[pi] != "/g" && !args[pi].StartsWith("/t")) // not global metadata => var
    {
        string avar = args[pi++];
        v = GetVar(ds, avar);
        if (v == null)
        {
            WriteError("No such variable: " + avar);
            return;
        }
    }
    else if (args[pi] == "/g") // global
    {
        pi++;
    }
    // Optional explicit type: "/t:TypeName".
    string stype = null;
    if (args[pi].StartsWith("/t:"))
    {
        stype = args[pi].Substring(3);
        pi++;
    }
    // Key
    string key = args[pi++];
    object value = null;
    Type type = null;
    if (stype != null)
    {
        type = GetType(stype);
        if (type == null)
        {
            // GetType presumably reported the bad type name already -- confirm.
            return;
        }
    }
    // Values: collected first, converted via GetValue (which may refine elType).
    ArrayList values = new ArrayList();
    Type elType = type;
    if (type != null && type.IsArray)
    {
        elType = type.GetElementType();
    }
    while (pi < args.Length)
    {
        string svalue = args[pi++];
        value = GetValue(svalue, ref elType);
        values.Add(value);
        // A non-array explicit type admits exactly one value.
        if (values.Count > 1 && (stype != null && !type.IsArray))
        {
            WriteError("Too many values specified.");
            return;
        }
    }
    // No values is only acceptable for an explicit array type (=> empty array).
    if (values.Count == 0 && (type == null || !type.IsArray))
    {
        WriteError("No data specified.");
        return;
    }
    if ((stype != null && type.IsArray) || values.Count > 1) // array
    {
        Array arr = Array.CreateInstance(elType, values.Count);
        for (int i = 0; i < arr.Length; i++)
        {
            arr.SetValue(values[i], i);
        }
        value = arr;
    }
    else
    {
        value = values[0];
    }
    Console.WriteLine();
    try
    {
        Console.WriteLine("Key: " + key);
        // NOTE(review): value.GetType() throws NullReferenceException if GetValue
        // returned null for the single value -- confirm GetValue's contract.
        Console.WriteLine("Type: " + value.GetType());
        Console.Write("Value: ");
        foreach (var item in values)
        {
            Console.Write(item + " ");
        }
        Console.WriteLine();
        // Variable metadata when a variable was selected, otherwise global metadata.
        if (v != null)
        {
            v.Metadata[key] = value;
        }
        else
        {
            ds.Metadata[key] = value;
        }
        ds.Commit();
        Console.WriteLine("\nMetadata updated.");
    }
    catch (Exception ex)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.Error.WriteLine("\nFAILED: " + ex);
        Console.ResetColor();
    }
}