/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// The first column in the result contains the times, then there is a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
  int itemCount = dfs0File.ItemInfo.Count;
  int timestepCount = dfs0File.FileInfo.TimeAxis.NumberOfTimeSteps;
  double[,] res = new double[timestepCount, itemCount + 1];

  // Preload a set of item data
  IDfsItemData[] itemDatas = new IDfsItemData[itemCount];
  for (int j = 0; j < itemCount; j++)
  {
    itemDatas[j] = dfs0File.CreateEmptyItemData(j + 1);
  }
  dfs0File.Reset();

  for (int i = 0; i < timestepCount; i++)
  {
    for (int j = 0; j < itemCount; j++)
    {
      IDfsItemData itemData = itemDatas[j];
      dfs0File.ReadItemTimeStep(itemData, i);
      // First column is time, remaining columns are data
      if (j == 0)
      {
        res[i, 0] = itemData.TimeInSeconds(dfs0File.FileInfo.TimeAxis);
      }
      res[i, j + 1] = Convert.ToDouble(itemData.Data.GetValue(0));
    }
  }
  return res;
}
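/// <summary>
/// Usage sketch for <see cref="ReadDfs0DataDouble"/>, not part of the original example;
/// the file name argument and the method name are illustrative only. Opens a dfs0 file
/// read-only, bulk-reads all values and prints one line per time step.
/// </summary>
public static void PrintDfs0Data(string dfs0Filename)
{
  IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(dfs0Filename);
  double[,] res = ReadDfs0DataDouble(dfs0File);
  dfs0File.Close();

  for (int i = 0; i < res.GetLength(0); i++)
  {
    // Column 0 is the time of the time step, the remaining columns hold one value per item
    Console.Write(res[i, 0]);
    for (int j = 1; j < res.GetLength(1); j++)
    {
      Console.Write("\t" + res[i, j]);
    }
    Console.WriteLine();
  }
}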
/// <summary>
/// Bulk write the times and data for a dfs0 file.
/// <para>
/// The <paramref name="data"/> contains a column for each
/// item in the file. There are as many rows as there are timesteps.
/// </para>
/// </summary>
public static void WriteDfs0DataDouble(IDfsFile dfs0File, double[] times, double[,] data)
{
  int itemCount = dfs0File.ItemInfo.Count;

  if (times.Length != data.GetLength(0))
  {
    throw new ArgumentException("Number of time steps does not match number of data rows");
  }
  if (itemCount != data.GetLength(1))
  {
    throw new ArgumentException("Number of items does not match number of data columns");
  }

  bool[] isFloatItem = new bool[itemCount];
  for (int j = 0; j < itemCount; j++)
  {
    isFloatItem[j] = dfs0File.ItemInfo[j].DataType == DfsSimpleType.Float;
  }

  float[] fdata = new float[1];
  double[] ddata = new double[1];

  dfs0File.Reset();

  for (int i = 0; i < times.Length; i++)
  {
    for (int j = 0; j < itemCount; j++)
    {
      if (isFloatItem[j])
      {
        fdata[0] = (float)data[i, j];
        dfs0File.WriteItemTimeStepNext(times[i], fdata);
      }
      else
      {
        ddata[0] = data[i, j];
        dfs0File.WriteItemTimeStepNext(times[i], ddata);
      }
    }
  }
}
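/// <summary>
/// Round-trip sketch combining <see cref="ReadDfs0DataDouble"/> and
/// <see cref="WriteDfs0DataDouble"/>; not part of the original example. It assumes the
/// dfs0 file already exists, is opened in edit mode, and has an equidistant time axis
/// or a time axis in seconds, so the times read back can be reused when writing.
/// Delete values are not treated specially here. The method name, file name and scale
/// factor are illustrative only.
/// </summary>
public static void ScaleDfs0File(string dfs0Filename, double factor)
{
  IDfsFile dfs0File = DfsFileFactory.DfsGenericOpenEdit(dfs0Filename);

  // First column of res is the time, the remaining columns are item values
  double[,] res = ReadDfs0DataDouble(dfs0File);
  int timestepCount = res.GetLength(0);
  int itemCount = res.GetLength(1) - 1;

  double[] times = new double[timestepCount];
  double[,] data = new double[timestepCount, itemCount];
  for (int i = 0; i < timestepCount; i++)
  {
    times[i] = res[i, 0];
    for (int j = 0; j < itemCount; j++)
    {
      data[i, j] = factor * res[i, j + 1];
    }
  }

  // WriteDfs0DataDouble calls Reset() internally, so writing starts from the first time step again
  WriteDfs0DataDouble(dfs0File, times, data);
  dfs0File.Close();
}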
/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// The first column in the result contains the times, then there is a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
  int itemCount = dfs0File.ItemInfo.Count;
  int timestepCount = dfs0File.FileInfo.TimeAxis.NumberOfTimeSteps;
  double[,] res = new double[timestepCount, itemCount + 1];

  // Preload a set of item data
  IDfsItemData[] itemDatas = new IDfsItemData[itemCount];
  for (int j = 0; j < itemCount; j++)
  {
    itemDatas[j] = dfs0File.CreateEmptyItemData(j + 1);
  }
  dfs0File.Reset();

  // Check if time axis is really a time axis, or if it is a non-time axis
  eumUnit timeUnit = dfs0File.FileInfo.TimeAxis.TimeUnit;
  bool isTimeUnit = EUMWrapper.eumUnitsEqv((int)eumUnit.eumUsec, (int)timeUnit);

  for (int i = 0; i < timestepCount; i++)
  {
    for (int j = 0; j < itemCount; j++)
    {
      IDfsItemData itemData = itemDatas[j];
      dfs0File.ReadItemTimeStep(itemData, i);
      // First column is time, remaining columns are data
      if (j == 0)
      {
        if (isTimeUnit)
        {
          res[i, 0] = itemData.TimeInSeconds(dfs0File.FileInfo.TimeAxis);
        }
        else // not a time unit, just return the value
        {
          res[i, 0] = itemData.Time;
        }
      }
      res[i, j + 1] = Convert.ToDouble(itemData.Data.GetValue(0));
    }
  }
  return res;
}
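/// <summary>
/// Minimal sketch, not part of the original example, showing the same EUM-unit check in
/// isolation: it reports how the first column returned by <see cref="ReadDfs0DataDouble"/>
/// should be interpreted. The method name and console output are illustrative only.
/// </summary>
public static void DescribeFirstColumn(IDfsFile dfs0File)
{
  eumUnit timeUnit = dfs0File.FileInfo.TimeAxis.TimeUnit;
  bool isTimeUnit = EUMWrapper.eumUnitsEqv((int)eumUnit.eumUsec, (int)timeUnit);
  if (isTimeUnit)
  {
    Console.WriteLine("First column: time in seconds relative to the file start");
  }
  else
  {
    Console.WriteLine("First column: raw axis values in unit " + timeUnit);
  }
}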
/// <summary>
/// Example of how to copy static data to dynamic item data in a Dfs file.
/// <para>
/// Static data is by default not visible in many tools and editors. This is a way to
/// make static data visible as dynamic data.
/// </para>
/// </summary>
/// <param name="sourceFilename">Path and name of the source dfs file</param>
/// <param name="filename">Path and name of the new file to create</param>
public static void CopyStaticToDynamicDfsFile(string sourceFilename, string filename)
{
  IDfsFile source = DfsFileFactory.DfsGenericOpen(sourceFilename);
  IDfsFileInfo fileInfo = source.FileInfo;

  DfsBuilder builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

  // Set up the header
  builder.SetDataType(fileInfo.DataType);
  builder.SetGeographicalProjection(fileInfo.Projection);
  builder.SetTemporalAxis(fileInfo.TimeAxis);
  builder.SetItemStatisticsType(fileInfo.StatsType);
  builder.DeleteValueByte = fileInfo.DeleteValueByte;
  builder.DeleteValueDouble = fileInfo.DeleteValueDouble;
  builder.DeleteValueFloat = fileInfo.DeleteValueFloat;
  builder.DeleteValueInt = fileInfo.DeleteValueInt;
  builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

  // Transfer compression keys - if any.
  if (fileInfo.IsFileCompressed)
  {
    int[] xkey;
    int[] ykey;
    int[] zkey;
    fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
    builder.SetEncodingKey(xkey, ykey, zkey);
  }

  // Copy custom blocks - if any
  foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
  {
    builder.AddCustomBlock(customBlock);
  }

  // Copy static items
  IDfsStaticItem sourceStaticItem;
  while (null != (sourceStaticItem = source.ReadStaticItemNext()))
  {
    DfsDynamicItemBuilder dynStatItem = builder.CreateDynamicItemBuilder();
    dynStatItem.Set(sourceStaticItem.Name, sourceStaticItem.Quantity, sourceStaticItem.DataType);
    dynStatItem.SetAxis(sourceStaticItem.SpatialAxis);
    dynStatItem.SetValueType(DataValueType.Instantaneous);
    builder.AddDynamicItem(dynStatItem.GetDynamicItemInfo());
  }

  // Create file
  builder.CreateFile(filename);

  // Get the file
  DfsFile file = builder.GetFile();

  source.Reset();

  // Copy static data to dynamic item data
  while (null != (sourceStaticItem = source.ReadStaticItemNext()))
  {
    file.WriteItemTimeStepNext(0, sourceStaticItem.Data);
  }

  source.Close();
  file.Close();
}
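/// <summary>
/// Usage sketch for <see cref="CopyStaticToDynamicDfsFile"/>, not part of the original
/// example; the file names are hypothetical. Any dfs file with static items, e.g. a dfs2
/// bathymetry, can be used as the source. After the copy, the static items of the source
/// appear as dynamic items in the new file and can be listed through ItemInfo.
/// </summary>
public static void CopyStaticToDynamicExample()
{
  CopyStaticToDynamicDfsFile("bathymetry.dfs2", "bathymetry_as_dynamic.dfs2");

  // List the dynamic items of the new file - these were static items in the source file
  IDfsFile file = DfsFileFactory.DfsGenericOpen("bathymetry_as_dynamic.dfs2");
  for (int j = 0; j < file.ItemInfo.Count; j++)
  {
    Console.WriteLine(file.ItemInfo[j].Name);
  }
  file.Close();
}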