/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// First column in the result are the times, then a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
/// <param name="dfs0File">An open dfs0 file</param>
/// <returns>Matrix of size [timesteps, items+1]; column 0 holds the time in seconds</returns>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
  int itemCount = dfs0File.ItemInfo.Count;
  // Hoist the time axis - it is loop invariant and used repeatedly below.
  var timeAxis = dfs0File.FileInfo.TimeAxis;
  int timestepCount = timeAxis.NumberOfTimeSteps;
  double[,] res = new double[timestepCount, itemCount + 1];

  // Preload a set of item data buffers, reused for every time step to avoid
  // allocating a new data object per read (item numbers are 1-based).
  IDfsItemData[] itemDatas = new IDfsItemData[itemCount];
  for (int j = 0; j < itemCount; j++)
  {
    itemDatas[j] = dfs0File.CreateEmptyItemData(j + 1);
  }
  dfs0File.Reset();

  for (int i = 0; i < timestepCount; i++)
  {
    for (int j = 0; j < itemCount; j++)
    {
      IDfsItemData itemData = itemDatas[j];
      dfs0File.ReadItemTimeStep(itemData, i);
      // First column is time, remaining columns are data
      if (j == 0)
      {
        res[i, 0] = itemData.TimeInSeconds(timeAxis);
      }
      res[i, j + 1] = Convert.ToDouble(itemData.Data.GetValue(0));
    }
  }
  return res;
}
/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// First column in the result are the times, then a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
  int nItems = dfs0File.ItemInfo.Count;
  int nSteps = dfs0File.FileInfo.TimeAxis.NumberOfTimeSteps;
  double[,] result = new double[nSteps, nItems + 1];

  // Reusable item-data buffers, one per dynamic item (item numbers are 1-based)
  IDfsItemData[] buffers = new IDfsItemData[nItems];
  for (int item = 0; item < nItems; item++)
  {
    buffers[item] = dfs0File.CreateEmptyItemData(item + 1);
  }
  dfs0File.Reset();

  for (int step = 0; step < nSteps; step++)
  {
    for (int item = 0; item < nItems; item++)
    {
      IDfsItemData buffer = buffers[item];
      dfs0File.ReadItemTimeStep(buffer, step);
      // Column 0 holds the time; data columns start at index 1
      if (item == 0)
      {
        result[step, 0] = buffer.TimeInSeconds(dfs0File.FileInfo.TimeAxis);
      }
      result[step, item + 1] = Convert.ToDouble(buffer.Data.GetValue(0));
    }
  }
  return result;
}
/// <summary>
/// Update time series with a constant change factor, adding 10% to all values
/// </summary>
/// <param name="dfs0File">Path and name of file, e.g. Rain_instantaneous.dfs0 test file</param>
/// <param name="dfs0FileNew">Name of new updated file</param>
public static void UpdateDfs0Data(string dfs0File, string dfs0FileNew)
{
  // Open source file
  IDfsFile source = DfsFileFactory.DfsGenericOpen(dfs0File);

  // Create a new file with updated rain values
  DfsBuilder builder = DfsBuilder.Create(source.FileInfo.FileTitle + "Updated", "MIKE SDK", 13);

  // Copy header info from source file to new file
  builder.SetDataType(source.FileInfo.DataType);
  builder.SetGeographicalProjection(source.FileInfo.Projection);
  builder.SetTemporalAxis(source.FileInfo.TimeAxis);

  // Copy over first item from source file to new file
  builder.AddDynamicItem(source.ItemInfo[0]);

  // Create the new file
  builder.CreateFile(dfs0FileNew);
  IDfsFile target = builder.GetFile();

  // Loop over all timesteps; hoist the loop-invariant count out of the condition
  int numTimeSteps = source.FileInfo.TimeAxis.NumberOfTimeSteps;
  for (int i = 0; i < numTimeSteps; i++)
  {
    // Read time step for item, and extract value
    // (item data is assumed to be of type double - holds for the test file)
    IDfsItemData<double> itemData = (IDfsItemData<double>)source.ReadItemTimeStep(1, i);
    double value = itemData.Data[0];
    // Write new value to target, adding 10% to its value
    target.WriteItemTimeStepNext(itemData.Time, new double[] { value * 1.1 });
  }

  source.Close();
  target.Close();
}
/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// First column in the result are the times, then a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
/// <param name="dfs0File">An open dfs0 file</param>
/// <returns>Matrix of size [timesteps, items+1]; column 0 holds the time
/// in seconds for a true time axis, otherwise the raw axis value</returns>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
  int itemCount = dfs0File.ItemInfo.Count;
  // Hoist the time axis - it is loop invariant and used repeatedly below.
  var timeAxis = dfs0File.FileInfo.TimeAxis;
  int timestepCount = timeAxis.NumberOfTimeSteps;
  double[,] res = new double[timestepCount, itemCount + 1];

  // Preload a set of item data buffers, reused for all time steps
  IDfsItemData[] itemDatas = new IDfsItemData[itemCount];
  for (int j = 0; j < itemCount; j++)
  {
    itemDatas[j] = dfs0File.CreateEmptyItemData(j + 1);
  }
  dfs0File.Reset();

  // Check if time axis is really a time axis, or if it is a non-time axis
  eumUnit timeUnit = timeAxis.TimeUnit;
  bool isTimeUnit = EUMWrapper.eumUnitsEqv((int)eumUnit.eumUsec, (int)timeUnit);

  for (int i = 0; i < timestepCount; i++)
  {
    for (int j = 0; j < itemCount; j++)
    {
      IDfsItemData itemData = itemDatas[j];
      dfs0File.ReadItemTimeStep(itemData, i);
      // First column is time, remaining columns are data
      if (j == 0)
      {
        if (isTimeUnit)
        {
          res[i, 0] = itemData.TimeInSeconds(timeAxis);
        }
        else // not a time-unit, just return the value
        {
          res[i, 0] = itemData.Time;
        }
      }
      res[i, j + 1] = Convert.ToDouble(itemData.Data.GetValue(0));
    }
  }
  return res;
}
/// <summary>
/// Introductory example of how to load a dfs0 file.
/// <para>
/// The method assumes that the Rain_stepaccumulated.dfs0 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of Rain_stepaccumulated.dfs0 test file</param>
/// <returns>Sum of all values of all items over all time steps</returns>
public static double ReadDfs0File(string filename)
{
  // Open the file as a generic dfs file
  IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

  // Header information is contained in the IDfsFileInfo
  IDfsFileInfo fileInfo = dfs0File.FileInfo;
  int steps = fileInfo.TimeAxis.NumberOfTimeSteps; // 19

  // Information on each of the dynamic items, here the first one
  IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs0File.ItemInfo[0];
  string nameOfFirstDynamicItem = dynamicItemInfo.Name;            // "Rain"
  DfsSimpleType typeOfFirstDynamicItem = dynamicItemInfo.DataType; // Double
  ValueType valueType = dynamicItemInfo.ValueType;                 // StepAccumulated

  // Read data of first item, third time step (items start by 1, timesteps by 0)
  IDfsItemData datag = dfs0File.ReadItemTimeStep(1, 2);
  double value1 = System.Convert.ToDouble(datag.Data.GetValue(0)); // 0.36

  // Assuming this is a double value, the item data object can be converted to the correct type
  IDfsItemData<double> data = (IDfsItemData<double>)datag;
  double value2 = data.Data[0]; // 0.36

  // This iterates through all timesteps and items in the file.
  // For performance reasons it is important to iterate over time steps
  // first and items second. Hoist the invariant item count out of the loop.
  int itemCount = dfs0File.ItemInfo.Count;
  double sum = 0;
  for (int i = 0; i < steps; i++)
  {
    for (int j = 1; j <= itemCount; j++)
    {
      data = (IDfsItemData<double>)dfs0File.ReadItemTimeStep(j, i);
      double value = data.Data[0];
      sum += value;
    }
  }
  dfs0File.Close();
  return sum;
}
/// <summary>
/// Find maximum value and time of maximum for a specified item in dfs0 file
/// </summary>
/// <param name="filename">Path and name of file, e.g. data_ndr_roese.dfs0 test file</param>
/// <param name="itemNumber">Item number to find maximum for (1-based)</param>
/// <returns>The maximum value found for the item</returns>
public static double FindMaxValue(string filename, int itemNumber)
{
  // Open file, using stream class
  Stream stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
  IDfsFile file = DfsFileFactory.DfsGenericOpen(stream);
  //IDfsFile file = DfsFileFactory.DfsGenericOpen(filename);

  // Extract Start date-time of file - assuming file is equidistant-calendar axis
  IDfsEqCalendarAxis timeAxis = (IDfsEqCalendarAxis)file.FileInfo.TimeAxis;
  DateTime startDateTime = timeAxis.StartDateTime;

  // Empty item data, reused when calling ReadItemTimeStep
  IDfsItemData<float> itemData = (IDfsItemData<float>)file.CreateEmptyItemData(itemNumber);

  // max value and time variables (time kept both as seconds and DateTime for illustration)
  double maxValue = double.MinValue;
  double maxTimeSeconds = -1;
  DateTime maxDateTime = DateTime.MinValue;

  // Loop over all times in file
  for (int i = 0; i < file.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
  {
    // Read time step for item, and extract value
    file.ReadItemTimeStep(itemData, i);
    double value = itemData.Data[0];
    // Check if value is larger than maxValue
    if (value > maxValue)
    {
      maxValue = value;
      maxTimeSeconds = itemData.TimeInSeconds(timeAxis);
      maxDateTime = itemData.TimeAsDateTime(timeAxis);
    }
  }

  // Report results
  Console.Out.WriteLine("Max Value : {0} {1}", maxValue, file.ItemInfo[itemNumber - 1].Quantity.UnitAbbreviation);
  Console.Out.WriteLine("Max Value time : {0}", maxDateTime.ToString("yyyy-MM-dd HH:mm:ss"));

  // BUGFIX: the dfs file and the underlying stream were never closed,
  // leaking the file handle. Close both before returning.
  file.Close();
  stream.Close();

  return maxValue;
}
/// <summary>
/// Introductory example of how to load a dfs1 file.
/// <para>
/// The method assumes that the wln.dfs1 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of wln.dfs1 test file</param>
public static void ReadingDfs1File(string filename)
{
  // Open the file as a dfs1 file
  Dfs1File dfs1File = DfsFileFactory.Dfs1FileOpen(filename);

  // Spatial axis for this file is a 2D equidistant axis
  IDfsAxisEqD1 axisEqD1 = ((IDfsAxisEqD1)dfs1File.SpatialAxis);
  double dx = axisEqD1.Dx; // 900

  // Header information is contained in the IDfsFileInfo
  IDfsFileInfo fileInfo = dfs1File.FileInfo;
  int steps = fileInfo.TimeAxis.NumberOfTimeSteps; // 577

  // Information on each of the dynamic items, here the first one
  IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs1File.ItemInfo[0];
  string nameOfFirstDynamicItem = dynamicItemInfo.Name;            // "WL-N (m)"
  DfsSimpleType typeOfFirstDynamicItem = dynamicItemInfo.DataType; // Float

  // Read data of first item, third time step (items start by 1, timesteps by 0),
  // assuming data is of type float.
  IDfsItemData<float> data = (IDfsItemData<float>)dfs1File.ReadItemTimeStep(1, 2);

  // BUGFIX: the file was never closed, leaking the file handle.
  dfs1File.Close();
}
//private readonly List<IExchangeItem> _exchangeItems;
//private readonly List<TimeSerie> _timeSeries;

/// <summary>
/// DFS0 reader. Gets information from the dfs file, and reads data.
/// Opens the file, validates its extension and compliance, parses item names
/// into x,y,layer points, and builds the list of time steps.
/// </summary>
/// <param name="dfsfile">Full path to the dfs0 file.</param>
/// <exception cref="FileNotFoundException">If the file does not exist.</exception>
/// <exception cref="Exception">If the extension is not .dfs0, or a
/// non-equidistant time axis turns out not to be regular.</exception>
public Dfs0Reader(string dfsfile) : base(dfsfile)
{
  // Set ObservationFile
  if (!File.Exists(dfsfile))
  {
    // NOTE(review): the "{0}" placeholder is never substituted - this
    // FileNotFoundException constructor takes (message, fileName), not
    // format arguments. Verify the intended message.
    throw new FileNotFoundException("\n ERROR: DFS File Not Found! \n Could not find: {0} \n", dfsfile);
  }

  // Determine type: only .dfs0 files are accepted (case-insensitive)
  string fileExtension = Path.GetExtension(dfsfile);
  if (System.String.Compare(fileExtension, ".dfs0", System.StringComparison.OrdinalIgnoreCase) == 0)
  {
    // Re-read of the extension is redundant but harmless
    fileExtension = Path.GetExtension(dfsfile);
  }
  else
  {
    throw new Exception("\n ERROR: Observation File Type Incorrect! Expecting dfs0. \n \n");
  }

  // Open the file as a generic dfs file
  _dfs0File = DfsFileFactory.DfsGenericOpen(dfsfile);

  // Header information is contained in the IDfsFileInfo
  IDfsFileInfo fileInfo = _dfs0File.FileInfo;

  // Check for dfs compliance
  CheckDFSCompliance();

  // Number of time steps (same for all items)
  _numTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;

  // Starting from the first time step index of the axis
  int _firstTimeStepIndex = fileInfo.TimeAxis.FirstTimeStepIndex;

  // Number of variable items in dfs0
  _numItems = _dfs0File.ItemInfo.Count;

  // Add the IDs to list (Keys). Item names are parsed as "x,y,layer" -
  // presumably a convention of the file producer; TODO confirm.
  _itemIDs = new List<string>();
  _quantities = new List<string>();
  _xyLayerPoints = new List<IXYLayerPoint>();
  foreach (var itemInfo in _dfs0File.ItemInfo)
  {
    String name = itemInfo.Name;
    var coords = name.Split(',');
    double x = Convert.ToDouble(coords[0]);
    double y = Convert.ToDouble(coords[1]);
    int zLayer = Convert.ToInt32(coords[2]);
    _quantities.Add(_dfs0File.FileInfo.FileTitle);
    _itemIDs.Add(name);
    _xyLayerPoints.Add(new XYLayerPoint(x, y, zLayer));
  }

  // Gather all times
  _times = _dfs0File.FileInfo.TimeAxis.GetDateTimes().ToList();
  // NOTE(review): the next line immediately overwrites the list read from the
  // time axis with the _timesteps field - verify this is intentional and that
  // _timesteps is populated at this point.
  _times = _timesteps;
  DateTime firstTime = _times[0];

  if (_dfs0File.FileInfo.TimeAxis.TimeAxisType != TimeAxisType.CalendarEquidistant)
  {
    // Handle pseudo-irregular files: read the time offset of every item at
    // every step and require all items to agree within 1.0 (time-axis units,
    // presumably seconds - TODO confirm).
    double[] dates = new double[_numTimeSteps]; //just make 1 bigger for easy indexing
    for (int iTimeStep = _firstTimeStepIndex; iTimeStep < _numTimeSteps; iTimeStep++)
    {
      for (int iItem = 1; iItem < _numItems + 1; iItem++)
      {
        IDfsItemData data1 = _dfs0File.ReadItemTimeStep(iItem, iTimeStep);
        double offsetTime = data1.Time;
        if (iItem == 1)
        {
          // First item defines the reference offset for this step
          dates[iTimeStep] = offsetTime;
        }
        else
        {
          // All other items must carry (nearly) the same time offset
          if (Math.Abs(offsetTime - dates[iTimeStep]) > 1.0)
          {
            throw new Exception("Non Equidistant Calander is not regular");
          }
        }
      }
      // Convert relative offset to absolute time from the first time step
      if (iTimeStep > 0)
      {
        _times[iTimeStep] = _times[0].AddSeconds(dates[iTimeStep]);
      }
    }
  }

  IList<IDfsDynamicItemInfo> infoAllTimes = _dfs0File.ItemInfo;
  String TimeSeriesName = infoAllTimes[0].Name;

  // Delete values (markers for missing data in the dfs file)
  _deleteValueDouble = _dfs0File.FileInfo.DeleteValueDouble;
  _deleteValueFloat = _dfs0File.FileInfo.DeleteValueFloat;
}
/// <summary>
/// Create a new file, being the difference of two files.
/// <para>
/// The two input files must be equal in structure, e.g. coming
/// from the same simulation but giving different results.
/// Header and static data must be identical, only difference
/// must be in values of the dynamic data.
/// </para>
/// </summary>
/// <param name="file1">First input file</param>
/// <param name="file2">Second input file</param>
/// <param name="filediff">Name of difference file; when null or empty, no file
/// is written and only difference statistics are printed</param>
/// <exception cref="Exception">If item counts, item sizes, or item data types
/// of the two files do not match</exception>
public static void CreateDiffFile(string file1, string file2, string filediff = null)
{
  IDfsFile dfs1 = DfsFileFactory.DfsGenericOpen(file1);
  IDfsFile dfs2 = DfsFileFactory.DfsGenericOpen(file2);

  // Validate that it has the same number of items.
  if (dfs1.ItemInfo.Count != dfs2.ItemInfo.Count)
  {
    throw new Exception("Number of dynamic items does not match");
  }
  int numItems = dfs1.ItemInfo.Count;

  // In case number of time steps does not match, take the smallest.
  int numTimes = dfs1.FileInfo.TimeAxis.NumberOfTimeSteps;
  if (numTimes > dfs2.FileInfo.TimeAxis.NumberOfTimeSteps)
  {
    numTimes = dfs2.FileInfo.TimeAxis.NumberOfTimeSteps;
    Console.Out.WriteLine("Number of time steps does not match, using the smallest number");
  }

  // For recording max difference for every item
  double[] maxDiff = new double[dfs1.ItemInfo.Count];
  // Index in time (index) of maximum and first difference. -1 if no difference
  int[] maxDiffTime = new int[dfs1.ItemInfo.Count];
  int[] firstDiffTime = new int[dfs1.ItemInfo.Count];
  for (int i = 0; i < dfs1.ItemInfo.Count; i++)
  {
    maxDiffTime[i] = -1;
    firstDiffTime[i] = -1;
  }

  // Copy over info from the first file, assuming the second file contains the same data.
  IDfsFileInfo fileInfo = dfs1.FileInfo;

  // Only build an output file when a name was supplied; otherwise this method
  // just collects and prints statistics (builder stays null and all
  // builder?/diff? calls below are no-ops).
  DfsBuilder builder = null;
  if (!string.IsNullOrEmpty(filediff))
  {
    builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

    // Set up the header
    builder.SetDataType(fileInfo.DataType);
    builder.SetGeographicalProjection(fileInfo.Projection);
    builder.SetTemporalAxis(fileInfo.TimeAxis);
    builder.SetItemStatisticsType(fileInfo.StatsType);
    builder.DeleteValueByte = fileInfo.DeleteValueByte;
    builder.DeleteValueDouble = fileInfo.DeleteValueDouble;
    builder.DeleteValueFloat = fileInfo.DeleteValueFloat;
    builder.DeleteValueInt = fileInfo.DeleteValueInt;
    builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

    // Transfer compression keys.
    if (fileInfo.IsFileCompressed)
    {
      int[] xkey;
      int[] ykey;
      int[] zkey;
      fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
      builder.SetEncodingKey(xkey, ykey, zkey);
    }

    // Copy custom blocks
    foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
    {
      builder.AddCustomBlock(customBlock);
    }
  }

  // Copy dynamic item definitions, validating that the two files match
  bool[] floatItems = new bool[dfs1.ItemInfo.Count];
  for (int i = 0; i < dfs1.ItemInfo.Count; i++)
  {
    var itemInfo = dfs1.ItemInfo[i];

    // Validate item sizes
    var itemInfo2 = dfs2.ItemInfo[i];
    if (itemInfo.ElementCount != itemInfo2.ElementCount)
    {
      throw new Exception("Dynamic items must have same size, item number " + (i + 1) +
                          " has different sizes in the two files");
    }
    // Validate the data type, only supporting floats and doubles.
    if (itemInfo.DataType == DfsSimpleType.Float)
    {
      floatItems[i] = true;
    }
    else if (itemInfo.DataType != DfsSimpleType.Double)
    {
      throw new Exception("Dynamic item must be double or float, item number " + (i + 1) +
                          " is of type " + (itemInfo.DataType));
    }
    builder?.AddDynamicItem(itemInfo);
  }

  // Create file
  builder?.CreateFile(filediff);

  if (builder != null)
  {
    // Copy over static items from file 1, assuming the static items of file 2 are identical
    IDfsStaticItem si1;
    while (null != (si1 = dfs1.ReadStaticItemNext()))
    {
      builder.AddStaticItem(si1);
    }
  }

  // Get the file
  DfsFile diff = builder?.GetFile();

  // Write dynamic data to the file, being the difference between the two.
  // Both files are read strictly sequentially (ReadItemTimeStepNext), so the
  // loop order (time outer, item inner) must match the file layout.
  for (int i = 0; i < numTimes; i++)
  {
    for (int j = 0; j < numItems; j++)
    {
      if (floatItems[j])
      {
        IDfsItemData<float> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData<float>;
        IDfsItemData<float> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData<float>;
        // Difference is computed in-place in data1; statistics are updated.
        // Note: first nonzero difference always exceeds maxDiff (initially 0),
        // so firstDiffTime is set on the first difference encountered.
        for (int k = 0; k < data1.Data.Length; k++)
        {
          float valuediff = data1.Data[k] - data2.Data[k];
          data1.Data[k] = valuediff;
          float absValueDiff = Math.Abs(valuediff);
          if (absValueDiff > maxDiff[j])
          {
            maxDiff[j] = absValueDiff;
            maxDiffTime[j] = i;
            if (firstDiffTime[j] == -1)
            {
              firstDiffTime[j] = i;
            }
          }
        }
        diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
      }
      else
      {
        // Same as the float branch, for double-typed items
        IDfsItemData<double> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData<double>;
        IDfsItemData<double> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData<double>;
        for (int k = 0; k < data1.Data.Length; k++)
        {
          double valuediff = data1.Data[k] - data2.Data[k];
          data1.Data[k] = valuediff;
          double absValueDiff = Math.Abs(valuediff);
          if (absValueDiff > maxDiff[j])
          {
            maxDiff[j] = absValueDiff;
            maxDiffTime[j] = i;
            if (firstDiffTime[j] == -1)
            {
              firstDiffTime[j] = i;
            }
          }
        }
        diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
      }
    }
  }

  // Report difference statistics per item
  System.Console.WriteLine("Difference statistics:");
  for (int i = 0; i < maxDiffTime.Length; i++)
  {
    if (maxDiffTime[i] < 0)
    {
      Console.WriteLine("{0,-30}: no difference",
                        dfs1.ItemInfo[i].Name);
    }
    else
    {
      Console.WriteLine("{0,-30}: Max difference at timestep {1,3}: {2}. First difference at timestep {3}",
                        dfs1.ItemInfo[i].Name, maxDiffTime[i], maxDiff[i], firstDiffTime[i]);
    }
  }

  dfs1.Close();
  dfs2.Close();
  diff?.Close();
}
/// <summary>
/// Extract sub-area of dfsu (2D) file to a new dfsu file
/// </summary>
/// <param name="sourceFilename">Name of source file, i.e. OresundHD.dfsu test file</param>
/// <param name="outputFilename">Name of output file</param>
/// <param name="x1">Lower left x coordinate of sub area</param>
/// <param name="y1">Lower left y coordinate of sub area</param>
/// <param name="x2">upper right x coordinate of sub area</param>
/// <param name="y2">upper right y coordinate of sub area</param>
public static void ExtractSubareaDfsu2D(string sourceFilename, string outputFilename, double x1, double y1, double x2, double y2)
{
  DfsuFile dfsu = DfsFileFactory.DfsuFileOpen(sourceFilename);

  // Node coordinates
  double[] X = dfsu.X;
  double[] Y = dfsu.Y;
  float[] Z = dfsu.Z;
  int[] Code = dfsu.Code;

  // Loop over all elements, and all its nodes: If one node is inside
  // region, element (and nodes) are to be included in new mesh
  List<int> elmtsIncluded = new List<int>();
  bool[] nodesIncluded = new bool[dfsu.NumberOfNodes];
  for (int i = 0; i < dfsu.NumberOfElements; i++)
  {
    // Nodes of element (1-based node numbers)
    int[] nodes = dfsu.ElementTable[i];

    // Check if one of the nodes of the element is inside region
    bool elmtIncluded = false;
    for (int j = 0; j < nodes.Length; j++)
    {
      int node = nodes[j] - 1;
      if (x1 <= X[node] && X[node] <= x2 && y1 <= Y[node] && Y[node] <= y2)
      {
        elmtIncluded = true;
      }
    }

    if (elmtIncluded)
    {
      // Add element to list of included elements
      elmtsIncluded.Add(i);
      // Mark all nodes of element as included
      for (int j = 0; j < nodes.Length; j++)
      {
        int node = nodes[j] - 1;
        nodesIncluded[node] = true;
      }
    }
  }

  // array containing numbers of existing nodes in new mesh (indices)
  int[] renumber = new int[dfsu.NumberOfNodes];

  // new mesh nodes
  List<double> X2 = new List<double>();
  List<double> Y2 = new List<double>();
  List<float> Z2 = new List<float>();
  List<int> Code2 = new List<int>();
  List<int> nodeIds = new List<int>();

  int i2 = 0;
  for (int i = 0; i < dfsu.NumberOfNodes; i++)
  {
    if (nodesIncluded[i])
    {
      X2.Add(X[i]);
      Y2.Add(Y[i]);
      Z2.Add(Z[i]);
      Code2.Add(Code[i]);
      nodeIds.Add(dfsu.NodeIds[i]);
      // Node with index i will get index i2 in new mesh
      renumber[i] = i2;
      i2++;
    }
  }

  // New mesh elements
  List<int[]> elmttable2 = new List<int[]>();
  List<int> elmtIds = new List<int>();
  for (int i = 0; i < elmtsIncluded.Count; i++)
  {
    // Add new element
    int elmt = elmtsIncluded[i];
    int[] nodes = dfsu.ElementTable[elmt];
    // newNodes must be renumbered
    int[] newNodes = new int[nodes.Length];
    for (int j = 0; j < nodes.Length; j++)
    {
      // Do the renumbering of nodes from existing mesh to new mesh
      newNodes[j] = renumber[nodes[j] - 1] + 1;
    }
    elmttable2.Add(newNodes);
    // BUGFIX: use the original element index (elmt), not the loop counter (i),
    // so the retained element ids correspond to the extracted elements.
    elmtIds.Add(dfsu.ElementIds[elmt]);
  }

  // Create 2D dfsu file
  DfsuBuilder builder = DfsuBuilder.Create(DfsuFileType.Dfsu2D);

  // Setup header and geometry
  builder.SetNodes(X2.ToArray(), Y2.ToArray(), Z2.ToArray(), Code2.ToArray());
  //builder.SetNodeIds(nodeIds.ToArray());
  builder.SetElements(elmttable2.ToArray());
  builder.SetElementIds(elmtIds.ToArray()); // retain original element id's
  builder.SetProjection(dfsu.Projection);
  builder.SetTimeInfo(dfsu.StartDateTime, dfsu.TimeStepInSeconds);
  if (dfsu.ZUnit == eumUnit.eumUUnitUndefined)
  {
    builder.SetZUnit(eumUnit.eumUmeter);
  }
  else
  {
    builder.SetZUnit(dfsu.ZUnit);
  }

  // Add dynamic items, copying from source
  for (int i = 0; i < dfsu.ItemInfo.Count; i++)
  {
    IDfsSimpleDynamicItemInfo itemInfo = dfsu.ItemInfo[i];
    builder.AddDynamicItem(itemInfo.Name, itemInfo.Quantity);
  }

  // Create new file
  DfsuFile dfsuOut = builder.CreateFile(outputFilename);

  // Add new data
  float[] data2 = new float[elmtsIncluded.Count];
  for (int i = 0; i < dfsu.NumberOfTimeSteps; i++)
  {
    for (int j = 0; j < dfsu.ItemInfo.Count; j++)
    {
      // Read data from existing dfsu
      IDfsItemData<float> itemData = (IDfsItemData<float>)dfsu.ReadItemTimeStep(j + 1, i);
      // Extract value for elements in new mesh
      for (int k = 0; k < elmtsIncluded.Count; k++)
      {
        data2[k] = itemData.Data[elmtsIncluded[k]];
      }
      // write data
      dfsuOut.WriteItemTimeStepNext(itemData.Time, data2);
    }
  }
  dfsuOut.Close();
  dfsu.Close();
}
/// <summary>
/// Extract a single layer from a 3D dfsu file, and write it to a 2D dfsu file.
/// <para>
/// If a layer value does not exist for a certain 2D element, delete value is written
/// to the 2D result file. This is relevant for Sigma-Z type of files.
/// </para>
/// </summary>
/// <param name="filenameDfsu3">Name of 3D dfsu source file</param>
/// <param name="filenameDfsu2">Name of 2D dfsu result file</param>
/// <param name="layerNumber">Layer to extract.
///   <para>
///   Positive values count from bottom up i.e. 1 is bottom layer, 2 is second layer from bottom etc.
///   </para>
///   <para>
///   Negative values count from top down, i.e. -1 is toplayer, -2 is second layer from top etc.
///   </para>
/// </param>
/// <exception cref="InvalidOperationException">If the input file is not a 3D dfsu file</exception>
/// <exception cref="ArgumentException">If the layer number is out of range</exception>
public static void ExtractDfsu2DLayerFrom3D(string filenameDfsu3, string filenameDfsu2, int layerNumber)
{
  IDfsuFile dfsu3File = DfsFileFactory.DfsuFileOpen(filenameDfsu3);

  // Check that dfsu3 file is a 3D dfsu file.
  switch (dfsu3File.DfsuFileType)
  {
    case DfsuFileType.Dfsu2D:
    case DfsuFileType.DfsuVerticalColumn:
    case DfsuFileType.DfsuVerticalProfileSigma:
    case DfsuFileType.DfsuVerticalProfileSigmaZ:
      throw new InvalidOperationException("Input file is not a 3D dfsu file");
  }

  // Calculate offset from toplayer element. Offset is between 0 (top layer)
  // and dfsu3File.NumberOfLayers-1 (bottom layer)
  int topLayerOffset;
  if (layerNumber > 0 && layerNumber <= dfsu3File.NumberOfLayers)
  {
    // Positive: counting from the bottom up
    topLayerOffset = dfsu3File.NumberOfLayers - layerNumber;
  }
  else if (layerNumber < 0 && -layerNumber <= dfsu3File.NumberOfLayers)
  {
    // Negative: counting from the top down
    topLayerOffset = -layerNumber - 1;
  }
  else
  {
    throw new ArgumentException("Layer number is out of range");
  }

  double[] xv = dfsu3File.X;
  double[] yv = dfsu3File.Y;
  float[] zv = dfsu3File.Z;
  int[] cv = dfsu3File.Code;

  // --------------------------------------------------
  // Create 2D mesh from 3D mesh

  // List of new 2D nodes
  int node2DCount = 0;
  List<double> xv2 = new List<double>();
  List<double> yv2 = new List<double>();
  List<float> zv2 = new List<float>();
  List<int> cv2 = new List<int>();

  // Renumbering array, from 3D node numbers to 2D node numbers
  // i.e. if a 3D element refers to node number k, the 2D element node number is renumber[k]
  int[] renumber = new int[dfsu3File.NumberOfNodes];

  // Coordinates of last created node
  double xr2 = -1e-10;
  double yr2 = -1e-10;

  // Create 2D nodes, by skipping nodes with equal x,y coordinates
  // (presumably the 3D nodes are ordered column-wise - TODO confirm)
  for (int i = 0; i < dfsu3File.NumberOfNodes; i++)
  {
    // If 3D x,y coordinates are equal to the last created 2D node,
    // map this node to the last created 2D node, otherwise
    // create new 2D node and map to that one
    if (xv[i] != xr2 || yv[i] != yr2)
    {
      // Create new node
      node2DCount++;
      xr2 = xv[i];
      yr2 = yv[i];
      float zr2 = zv[i];
      int cr2 = cv[i];
      xv2.Add(xr2);
      yv2.Add(yr2);
      zv2.Add(zr2);
      cv2.Add(cr2);
    }
    // Map this 3D node to the last created 2D node (1-based 2D node number).
    renumber[i] = node2DCount;
  }

  // Find indices of top layer elements
  IList<int> topLayer = dfsu3File.FindTopLayerElements();

  // Create element table for 2D dfsu file
  int[][] elmttable2 = new int[topLayer.Count][];
  for (int i = 0; i < topLayer.Count; i++)
  {
    // 3D element nodes
    int[] elmt3 = dfsu3File.ElementTable[topLayer[i]];
    // 2D element nodes, only half as big, so copy over the first half
    int[] elmt2 = new int[elmt3.Length / 2];
    for (int j = 0; j < elmt2.Length; j++)
    {
      // NOTE(review): elsewhere in this file, element-table entries are
      // treated as 1-based node numbers (indexed with nodes[j] - 1), while
      // renumber here is indexed 0..NumberOfNodes-1. Verify that indexing
      // renumber[elmt3[j]] (rather than elmt3[j] - 1) is intended.
      elmt2[j] = renumber[elmt3[j]];
    }
    elmttable2[i] = elmt2;
  }

  // --------------------------------------------------
  // Create 2D dfsu file
  DfsuBuilder builder = DfsuBuilder.Create(DfsuFileType.Dfsu2D);

  // Setup header and geometry
  builder.SetNodes(xv2.ToArray(), yv2.ToArray(), zv2.ToArray(), cv2.ToArray());
  builder.SetElements(elmttable2);
  builder.SetProjection(dfsu3File.Projection);
  builder.SetTimeInfo(dfsu3File.StartDateTime, dfsu3File.TimeStepInSeconds);
  if (dfsu3File.ZUnit == eumUnit.eumUUnitUndefined)
  {
    builder.SetZUnit(eumUnit.eumUmeter);
  }
  else
  {
    builder.SetZUnit(dfsu3File.ZUnit);
  }

  // Add dynamic items, copying from source, though not the first one, if it
  // contains the z-variation on the nodes
  for (int i = 0; i < dfsu3File.ItemInfo.Count; i++)
  {
    IDfsSimpleDynamicItemInfo itemInfo = dfsu3File.ItemInfo[i];
    if (itemInfo.ElementCount == dfsu3File.NumberOfElements)
    {
      builder.AddDynamicItem(itemInfo.Name, itemInfo.Quantity);
    }
  }

  // Create file
  DfsuFile dfsu2File = builder.CreateFile(filenameDfsu2);

  // --------------------------------------------------
  // Process data

  // Check if the layer number exists for 2D element, i.e. if that element
  // in 2D has that number of columns in the 3D (relevant for sigma-z files).
  // If elementExists[i] is false, write delete value to file.
  // Column heights are derived from consecutive top-layer element indices.
  bool[] elementExists = new bool[topLayer.Count];
  int numLayersInColumn = topLayer[0] + 1;
  elementExists[0] = (numLayersInColumn - topLayerOffset) > 0;
  for (int i = 1; i < topLayer.Count; i++)
  {
    numLayersInColumn = (topLayer[i] - topLayer[i - 1]);
    elementExists[i] = (numLayersInColumn - topLayerOffset) > 0;
  }

  // For performance, use predefined itemdata objects when reading data from dfsu 3D file
  IDfsItemData<float>[] dfsu3ItemDatas = new IDfsItemData<float>[dfsu3File.ItemInfo.Count];
  for (int j = 0; j < dfsu3File.ItemInfo.Count; j++)
  {
    dfsu3ItemDatas[j] = (IDfsItemData<float>)dfsu3File.ItemInfo[j].CreateEmptyItemData();
  }

  // Float data to write to dfsu 2D file
  float[] data2 = new float[dfsu2File.NumberOfElements];
  float deleteValueFloat = dfsu2File.DeleteValueFloat;

  for (int i = 0; i < dfsu3File.NumberOfTimeSteps; i++)
  {
    for (int j = 0; j < dfsu3File.ItemInfo.Count; j++)
    {
      // Read data from 3D dfsu
      IDfsItemData<float> data3Item = dfsu3ItemDatas[j];
      bool ok = dfsu3File.ReadItemTimeStep(data3Item, i);
      // 3D data
      float[] data3 = data3Item.Data;

      // Skip any items not having size = NumberOfElements (the z-variation on the nodes)
      if (data3.Length != dfsu3File.NumberOfElements)
      {
        continue;
      }

      // Loop over all 2D elements
      for (int k = 0; k < topLayer.Count; k++)
      {
        // Extract layer data from 3D column into 2D element value
        if (elementExists[k])
        {
          data2[k] = data3[topLayer[k] - topLayerOffset];
        }
        else
        {
          data2[k] = deleteValueFloat;
        }
      }
      dfsu2File.WriteItemTimeStepNext(data3Item.Time, data2);
    }
  }

  dfsu3File.Close();
  dfsu2File.Close();
}
/// <summary>
/// Example on how to extract dfs0 data from a 2D dfsu file for certain elements. All items
/// from dfsu file are extracted.
/// </summary>
/// <param name="dfsuFileNamePath">Name, including path, of 2D dfsu file</param>
/// <param name="elmtsIndices">Indices of elements to extract data from</param>
/// <param name="useStream">Use stream when writing dfs0 files - then more than 400 files can be created simultaneously</param>
/// <exception cref="ArgumentException">If more than 400 elements are requested
/// without the stream approach</exception>
public static void ExtractDfs0FromDfsu(string dfsuFileNamePath, IList<int> elmtsIndices, bool useStream)
{
  // If not using stream approach, at most 400 elements at a time can be processed.
  // There is a limit on how many files you can have open at the same time using
  // the standard approach. It will fail in a nasty way, if the maximum number of
  // file handles are exceeded. This is not an issue when using .NET streams.
  if (!useStream && elmtsIndices.Count > 400)
  {
    throw new ArgumentException("At most 400 elements at a time");
  }

  // Open source dfsu file, either via a .NET stream or directly by name
  IDfsuFile source;
  Stream stream = null;
  if (useStream)
  {
    stream = new FileStream(dfsuFileNamePath, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    source = DfsuFile.Open(stream);
  }
  else
  {
    source = DfsuFile.Open(dfsuFileNamePath);
  }

  // Figure out "basic" dfs0 file name
  string dfsuFilename = Path.GetFileNameWithoutExtension(dfsuFileNamePath);
  string path = Path.GetDirectoryName(dfsuFileNamePath);
  string dfs0BaseFilename = Path.Combine(path, "test_" + dfsuFilename + "-");

  // Factory for creating dfs objects
  DfsFactory factory = new DfsFactory();

  // Create a dfs0 file for each element in elmtsIndices
  DfsFile[] dfs0Files = new DfsFile[elmtsIndices.Count];
  Stream[] dfs0Streams = new Stream[elmtsIndices.Count];
  double timeSpan = source.TimeStepInSeconds * source.NumberOfTimeSteps;
  for (int k = 0; k < elmtsIndices.Count; k++)
  {
    // Index of element to create dfs0 for
    int elmtsIndex = elmtsIndices[k];

    // Calculate element center coordinates, to be stored in dfs0 items.
    // Stored as float in dfs0, hence possible loss of precision...
    float x = 0, y = 0, z = 0;
    int[] nodeNumbers = source.ElementTable[elmtsIndex];
    for (int i = 0; i < nodeNumbers.Length; i++)
    {
      int nodeIndex = nodeNumbers[i] - 1; // from number to index
      x += (float)source.X[nodeIndex];
      y += (float)source.Y[nodeIndex];
      z += source.Z[nodeIndex];
    }
    x /= nodeNumbers.Length;
    y /= nodeNumbers.Length;
    z /= nodeNumbers.Length;

    // Start building dfs0 file header
    DfsBuilder builder = DfsBuilder.Create("fileTitle", "appTitle", 1);
    builder.SetDataType(1); // standard dfs0 value
    builder.SetGeographicalProjection(source.Projection);
    builder.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, source.StartDateTime, 0, source.TimeStepInSeconds));

    // Add all dynamic items from dfsu file to dfs0 file
    for (int j = 0; j < source.ItemInfo.Count; j++)
    {
      IDfsSimpleDynamicItemInfo sourceItem = source.ItemInfo[j];
      DfsDynamicItemBuilder itemBuilder = builder.CreateDynamicItemBuilder();
      itemBuilder.Set(sourceItem.Name, sourceItem.Quantity, sourceItem.DataType);
      itemBuilder.SetAxis(factory.CreateAxisEqD0());
      itemBuilder.SetValueType(sourceItem.ValueType);
      itemBuilder.SetReferenceCoordinates(x, y, z); // optional
      builder.AddDynamicItem(itemBuilder.GetDynamicItemInfo());
    }

    // Create and get file, store them in dfs0s array
    string dfs0Filename = dfs0BaseFilename + (elmtsIndex).ToString("000000") + ".dfs0";
    if (useStream)
    {
      // Create file using C# streams - necessary to provide number of time steps and timespan of data
      builder.SetNumberOfTimeSteps(source.NumberOfTimeSteps);
      builder.SetTimeInfo(0, timeSpan);
      Stream dfs0FileStream = new FileStream(dfs0Filename, FileMode.Create, FileAccess.Write, FileShare.ReadWrite);
      builder.CreateStream(dfs0FileStream);
      dfs0Streams[k] = dfs0FileStream;
    }
    else
    {
      // Create file in the ordinary way. Will include statistics (of delete values etc).
      builder.CreateFile(dfs0Filename);
    }
    dfs0Files[k] = builder.GetFile();
  }

  // For performance, use predefined itemdata objects when reading data from dfsu
  IDfsItemData<float>[] dfsuItemDatas = new IDfsItemData<float>[source.ItemInfo.Count];
  for (int j = 0; j < source.ItemInfo.Count; j++)
  {
    dfsuItemDatas[j] = (IDfsItemData<float>)source.ItemInfo[j].CreateEmptyItemData();
  }

  // Read data from dfsu and store in dfs0.
  // Each dfs0 file is written sequentially (WriteItemTimeStepNext), so the
  // loop order here must be: time step outer, item inner.
  float[] dfs0Data = new float[1];
  for (int i = 0; i < source.NumberOfTimeSteps; i++)
  {
    for (int j = 0; j < source.ItemInfo.Count; j++)
    {
      // Read data from dfsu
      IDfsItemData<float> dfsuItemData = dfsuItemDatas[j];
      bool ok = source.ReadItemTimeStep(dfsuItemData, i);
      float[] floats = dfsuItemData.Data;

      // write data to dfs0's
      for (int k = 0; k < elmtsIndices.Count; k++)
      {
        int elmtsIndex = elmtsIndices[k];
        dfs0Data[0] = floats[elmtsIndex];
        dfs0Files[k].WriteItemTimeStepNext(0, dfs0Data);
      }
    }
  }

  // Close dfsu files
  source.Close();
  if (stream != null)
  {
    stream.Close();
  }
  // Close all dfs0 files
  for (int k = 0; k < elmtsIndices.Count; k++)
  {
    dfs0Files[k].Close();
    if (dfs0Streams[k] != null)
    {
      dfs0Streams[k].Close();
    }
  }
}