/// <summary>
/// Creates a new DFS file at <paramref name="outputfile"/> using
/// <paramref name="dfsTemplate"/> as a template: the header is copied,
/// dynamic items are added (once, or repeated), and all static items are
/// transferred from the template.
/// </summary>
/// <param name="dfsTemplate">Open source file used as the template.</param>
/// <param name="outputfile">Path of the file to create.</param>
/// <param name="nRepeats">When greater than 1, the template's dynamic items
/// are added repeatedly via _CreateRepeatedDynamicItems; otherwise each item
/// is added exactly once.</param>
/// <returns>The newly created file, ready for dynamic data writes.</returns>
public static DfsFile CreateFromTemplate(IDfsFile dfsTemplate, string outputfile, int nRepeats)
{
    IDfsFileInfo fi = dfsTemplate.FileInfo;
    //this._AnalyzeDfsInputItems(dfsTemplate.ItemInfo);
    var builder = DfsBuilder.Create(fi.FileTitle, fi.ApplicationTitle, fi.ApplicationVersion);
    CreateHeader(fi, builder);
    var isDfsu3d = _IsDfsu3d(dfsTemplate.ItemInfo);
    if (nRepeats > 1)
    {
        // NOTE(review): for files detected as dfsu 3D the first item is added
        // once before the repeated items - presumably a coordinate/geometry
        // item that must not be repeated; confirm against _IsDfsu3d and
        // _CreateRepeatedDynamicItems.
        if (isDfsu3d)
        {
            builder.AddDynamicItem(dfsTemplate.ItemInfo[0]);
        }
        _CreateRepeatedDynamicItems(builder, dfsTemplate.ItemInfo, nRepeats);
    }
    else
    {
        // Single copy: select item indices 0 .. _NumberItems-1.
        var items = Enumerable.Range(0, _NumberItems(dfsTemplate.ItemInfo)).ToList();
        CreateDynamicItems(builder, dfsTemplate.ItemInfo, items);
    }
    builder.CreateFile(outputfile);
    // Transfer all static items from the template (done after CreateFile).
    IDfsStaticItem staticItem;
    while ((staticItem = dfsTemplate.ReadStaticItemNext()) != null)
    {
        builder.AddStaticItem(staticItem);
    }
    return (builder.GetFile());
}
/// <summary>
/// Example on how to modify a custom block.
/// <para>
/// The method assumes that a dfs2 file with the "M21_Misc" custom block, alike
/// the OresundHD.dfs2 test file, is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of dfs2 test file</param>
public static void CustomBlockModify(string filename)
{
    // Open for in-place editing, overwrite the fourth value of the first
    // custom block, and persist by closing.
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);
    IDfsCustomBlock block = file.FileInfo.CustomBlocks[0];
    block[3] = 25;
    file.Close();
}
/// <summary>
/// Extracts simulated discharge series for a fixed set of items from a
/// MIKE HYDRO dfs0 result file and writes one single-item dfs0 file per item.
/// NOTE(review): input/output paths and item numbers are hard-coded.
/// </summary>
static void Main(string[] args)
{
    // Item numbers in the result file to export; also used as output names.
    int[] nodeNumber = new int[] { 899, 2686, 2856, 2866, 2331, 3806, 2231, 3831 };
    IDfsFile resFile = DfsFileFactory.DfsGenericOpenEdit(@"E:\FFWS\Model\MIKEHYDRO\GBM_MIKEHYDRO.mhydro - Result Files\RiverBasin_GBM.dfs0");
    IDfsFileInfo resfileInfo = resFile.FileInfo;
    int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
    DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
    DateTime startDate = date[0];

    // Relative time of every step, sampled once from the first exported item
    // (nodeNumber[0] == 899, matching the original code) and reused below.
    double[] timeSpan = new double[noTimeSteps];
    for (int j = 0; j < noTimeSteps; j++)
    {
        timeSpan[j] = resFile.ReadItemTimeStep(nodeNumber[0], j).Time;
    }

    // The factory carries no per-file state here: create it once instead of
    // once per output file as the original did.
    DfsFactory factory = new DfsFactory();
    foreach (int element in nodeNumber)
    {
        // Pull the full series for this item.
        float[] QSimvalues = new float[noTimeSteps];
        for (int j = 0; j < noTimeSteps; j++)
        {
            var data = (IDfsItemData<float>)resFile.ReadItemTimeStep(element, j);
            QSimvalues[j] = Convert.ToSingle(data.Data[0]);
        }

        // Build a one-item dfs0 file named after the item number.
        string filename = @"E:\FFWS\Model\BrahmaputraHD\Boundary\" + element + ".dfs0";
        DfsBuilder filecreator = DfsBuilder.Create(element.ToString(), element.ToString(), 2014);
        filecreator.SetDataType(1);
        filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
        filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, new DateTime(startDate.Year, startDate.Month, startDate.Day, startDate.Hour, startDate.Minute, startDate.Second)));
        filecreator.SetItemStatisticsType(StatType.RegularStat);
        DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
        item.Set(element.ToString(), eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float);
        item.SetValueType(DataValueType.Instantaneous);
        item.SetAxis(factory.CreateAxisEqD0());
        item.SetReferenceCoordinates(1f, 2f, 3f);
        filecreator.AddDynamicItem(item.GetDynamicItemInfo());
        filecreator.CreateFile(filename);
        IDfsFile file = filecreator.GetFile();
        for (int j = 0; j < noTimeSteps; j++)
        {
            file.WriteItemTimeStepNext(timeSpan[j], new float[] { QSimvalues[j] });
        }
        file.Close();
    }
    resFile.Close(); // fix: the source file handle was never released
}
/// <summary>
/// Updates information in the header - <see cref="IDfsFileInfo"/>.
/// <para>
/// The method assumes that the OresundHD.dfs2 test file
/// (or preferably a copy of it) is the input file.
/// </para>
/// <para>
/// Strings are padded with zeros, when too short, and truncated when too long.
/// </para>
/// </summary>
/// <param name="filename">path and name of OresundHD.dfs2 test file</param>
public static void FileInfoModify(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);
    IDfsFileInfo header = file.FileInfo;

    // Title / version / data-type fields.
    header.FileTitle = "ups";
    header.ApplicationTitle = "Short title";
    header.ApplicationVersion = 12;
    header.DataType = 10101;

    // Delete ("missing") values for each supported element type.
    header.DeleteValueFloat = -5.5e-25f;
    header.DeleteValueByte = 7;
    header.DeleteValueDouble = -7.7e-114;
    header.DeleteValueInt = -123456;
    header.DeleteValueUnsignedInt = 123456;

    file.Close();
}
/// <summary>
/// Introductory example of how to load a dfs0 file.
/// <para>
/// The method assumes that the Rain_stepaccumulated.dfs0 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of Rain_stepaccumulated.dfs0 test file</param>
public static double ReadDfs0File(string filename)
{
    // Open generically; header data is exposed through IDfsFileInfo.
    IDfsFile file = DfsFileFactory.DfsGenericOpen(filename);
    IDfsFileInfo header = file.FileInfo;
    int stepCount = header.TimeAxis.NumberOfTimeSteps; // 19

    // Metadata of the first dynamic item.
    IDfsSimpleDynamicItemInfo firstItem = file.ItemInfo[0];
    string firstItemName = firstItem.Name;              // "Rain"
    DfsSimpleType firstItemType = firstItem.DataType;   // Double
    ValueType firstItemValueType = firstItem.ValueType; // StepAccumulated

    // Single value read: item 1 at time step 2 (items are 1-based,
    // time steps 0-based).
    IDfsItemData untyped = file.ReadItemTimeStep(1, 2);
    double value1 = System.Convert.ToDouble(untyped.Data.GetValue(0)); // 0.36

    // Knowing the element type is double, the data object can be cast.
    IDfsItemData<double> typed = (IDfsItemData<double>)untyped;
    double value2 = typed.Data[0]; // 0.36

    // Sum every value in the file. Iterate time-major (time step outer,
    // item inner) - that is the fast access order for dfs files.
    double total = 0;
    for (int step = 0; step < stepCount; step++)
    {
        for (int item = 1; item <= file.ItemInfo.Count; item++)
        {
            typed = (IDfsItemData<double>)file.ReadItemTimeStep(item, step);
            total += typed.Data[0];
        }
    }
    file.Close();
    return total;
}
/// <summary>
/// Builds a new DFS file at <paramref name="outputfile"/> from the header,
/// selected items and static data of <paramref name="dfsTemplate"/>, with a
/// time axis corrected for the first requested time step and stride.
/// </summary>
private DfsFile _CreateFromTemplate(IDfsFile dfsTemplate, string outputfile, IEnumerable<int> timesteps, int stride, List<int> items)
{
    IDfsFileInfo sourceInfo = dfsTemplate.FileInfo;
    var writer = DfsBuilder.Create(sourceInfo.FileTitle, sourceInfo.ApplicationTitle, sourceInfo.ApplicationVersion);

    // Time axis is rebased on the first selected step and the given stride.
    IDfsTemporalAxis adjustedAxis = _CorrectTimeAxis(sourceInfo.TimeAxis, timesteps.First(), stride);
    DfsOutput.CreateHeader(sourceInfo, writer, adjustedAxis);
    DfsOutput.CreateDynamicItems(writer, dfsTemplate.ItemInfo, items);
    writer.CreateFile(outputfile);

    // Static items are transferred after the file has been created.
    for (IDfsStaticItem s = dfsTemplate.ReadStaticItemNext(); s != null; s = dfsTemplate.ReadStaticItemNext())
    {
        writer.AddStaticItem(s);
    }
    return writer.GetFile();
}
/// <summary>
/// Introductory example of how to load a dfs0 file with a non-time axis
/// as the primary axis. The important part here is to NOT call
/// the <code>data.TimeInSeconds()</code>, because that will fail.
/// </summary>
/// <param name="filename">path and name of Added_Mass.dfs0 test file</param>
public static double ReadNonTimeAxisDfs0(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpen(filename);
    IDfsFileInfo header = file.FileInfo;

    // The "TimeAxis" of this file is really a regular (non-time) axis.
    int stepCount = header.TimeAxis.NumberOfTimeSteps;    // 256
    TimeAxisType axisType = header.TimeAxis.TimeAxisType; // TimeNonEquidistant
    eumUnit axisUnit = header.TimeAxis.TimeUnit;          // radian-per-second

    // Metadata of the first dynamic item.
    IDfsSimpleDynamicItemInfo firstItem = file.ItemInfo[0];
    string firstItemName = firstItem.Name;              // "DOF_1-1"
    DfsSimpleType firstItemType = firstItem.DataType;   // Float
    ValueType firstItemValueType = firstItem.ValueType; // Instantaneous

    // Sum every value; iterate time-major (step outer, item inner) -
    // that is the fast access order for dfs files.
    double total = 0;
    for (int step = 0; step < stepCount; step++)
    {
        for (int item = 1; item <= file.ItemInfo.Count; item++)
        {
            var frame = (IDfsItemData<float>)file.ReadItemTimeStep(item, step);
            // The axis value here is in radian-per-second, not seconds.
            double axisValue = frame.Time;
            total += frame.Data[0];
        }
    }
    file.Close();
    return total;
}
/// <summary>
/// dfs2 reader. Gets information from the dfs file, and reads data.
/// NOTE(review): this constructor throws NotImplementedException on its very
/// first statement, so everything below the first throw is UNREACHABLE - it
/// is scaffolding for the planned dfs2 implementation and is never executed.
/// </summary>
/// <param name="dfsfile">full path string to dfs2 file.</param>
public DFS2Reader(string dfsfile)
{
    throw new NotImplementedException("ToDo dfs2");

    // ---- unreachable from here down (kept as an implementation sketch) ----

    // Open the file as a generic dfs file
    _dfs2File = DfsFileFactory.Dfs2FileOpen(dfsfile);

    // Header information is contained in the IDfsFileInfo
    IDfsFileInfo fileInfo = _dfs2File.FileInfo;

    // Check for dfs compliance
    CheckDFSCompliance();

    // Number of time steps (same for all items)
    _numTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;

    // Number of variable items in dfs2
    _numItems = _dfs2File.ItemInfo.Count;

    // Add the IDs to list (Keys)
    _itemIDs = new List<string>();
    foreach (var itemInfo in _dfs2File.ItemInfo)
    {
        _itemIDs.Add(itemInfo.Name);
    }
    _times = _dfs2File.FileInfo.TimeAxis.GetDateTimes().ToList();

    // Delelte Values
    _deleteValueDouble = _dfs2File.FileInfo.DeleteValueDouble;
    _deleteValueFloat = _dfs2File.FileInfo.DeleteValueFloat;

    _xyLayerPoints = new List<IXYLayerPoint>();
    foreach (var itemInfo in _dfs2File.ItemInfo)
    {
        // Per-item spatial mapping still to be written.
        throw new NotImplementedException("ToDo dfs2");
    }
}
/// <summary>
/// Introductory example of how to load a dfs2 file.
/// <para>
/// The method assumes that the OresundHD.dfs2 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of OresundHD.dfs2 test file</param>
public static void ReadingDfs2File(string filename)
{
    Dfs2File file = DfsFileFactory.Dfs2FileOpen(filename);

    // The spatial axis of this file is a 2D equidistant axis.
    IDfsAxisEqD2 axis = (IDfsAxisEqD2)file.SpatialAxis;
    double dx = axis.Dx; // 900
    double dy = axis.Dy; // 900

    // Header data lives on the IDfsFileInfo.
    IDfsFileInfo header = file.FileInfo;
    int stepCount = header.TimeAxis.NumberOfTimeSteps;        // 13
    string projectionString = header.Projection.WKTString;    // "UTM-33"

    // Metadata of the first dynamic item.
    IDfsSimpleDynamicItemInfo firstItem = file.ItemInfo[0];
    string firstItemName = firstItem.Name;                    // "H Water Depth m"
    DfsSimpleType firstItemType = firstItem.DataType;         // Float

    // Read item 1 at time step 2 (items are 1-based, time steps 0-based),
    // assuming float data, then pick the value at grid cell (3, 4).
    IDfsItemData2D<float> frame = (IDfsItemData2D<float>)file.ReadItemTimeStep(1, 2);
    float value = frame[3, 4]; // 11.3634329

    // Traverse the whole file time-major: time step outer, item inner -
    // that is the fast access order for dfs files.
    for (int step = 0; step < stepCount; step++)
    {
        for (int item = 1; item <= file.ItemInfo.Count; item++)
        {
            frame = (IDfsItemData2D<float>)file.ReadItemTimeStep(item, step);
            value = frame[3, 4];
        }
    }
}
/// <summary>
/// Introductory example of how to load a dfs1 file.
/// <para>
/// The method assumes that the wln.dfs1 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of wln.dfs1 test file</param>
public static void ReadingDfs1File(string filename)
{
    Dfs1File file = DfsFileFactory.Dfs1FileOpen(filename);

    // The spatial axis of a dfs1 file is a 1D equidistant axis.
    IDfsAxisEqD1 axis = (IDfsAxisEqD1)file.SpatialAxis;
    double dx = axis.Dx; // 900

    // Header data lives on the IDfsFileInfo.
    IDfsFileInfo header = file.FileInfo;
    int stepCount = header.TimeAxis.NumberOfTimeSteps; // 577

    // Metadata of the first dynamic item.
    IDfsSimpleDynamicItemInfo firstItem = file.ItemInfo[0];
    string firstItemName = firstItem.Name;            // "WL-N (m)"
    DfsSimpleType firstItemType = firstItem.DataType; // Float

    // Read item 1 at time step 2 (items are 1-based, time steps 0-based),
    // assuming data is of type float.
    IDfsItemData<float> frame = (IDfsItemData<float>)file.ReadItemTimeStep(1, 2);
}
/// <summary>
/// Copies the header settings of <paramref name="fi"/> onto
/// <paramref name="builder"/>, substituting <paramref name="timeAxis"/> as
/// the temporal axis of the file being built.
/// </summary>
public static void CreateHeader(IDfsFileInfo fi, DfsBuilder builder, IDfsTemporalAxis timeAxis)
{
    builder.SetDataType(fi.DataType);
    builder.SetGeographicalProjection(fi.Projection);
    builder.SetTemporalAxis(timeAxis);
    builder.SetItemStatisticsType(fi.StatsType);

    // Delete ("missing") values for every supported element type.
    builder.DeleteValueByte = fi.DeleteValueByte;
    builder.DeleteValueDouble = fi.DeleteValueDouble;
    builder.DeleteValueFloat = fi.DeleteValueFloat;
    builder.DeleteValueInt = fi.DeleteValueInt;
    builder.DeleteValueUnsignedInt = fi.DeleteValueUnsignedInt;

    // Compressed source files carry encoding keys that must be transferred.
    if (fi.IsFileCompressed)
    {
        int[] encodeX;
        int[] encodeY;
        int[] encodeZ;
        fi.GetEncodeKey(out encodeX, out encodeY, out encodeZ);
        builder.SetEncodingKey(encodeX, encodeY, encodeZ);
    }

    // Custom blocks are copied verbatim.
    foreach (var block in fi.CustomBlocks)
    {
        builder.AddCustomBlock(block);
    }
}
/// <summary>
/// Exports every entry of the "required" list that is also present in the
/// available-items list to an individual dfs0 file: "WL" entries as water
/// level, "Q," entries as discharge. Requires a result file to have been
/// loaded first (fileName, dfsDate and dfsData populated).
/// </summary>
private void btnDFS0Fromlist_Click(object sender, EventArgs e)
{
    if (fileName == "")
    {
        MessageBox.Show("No files have been selected for processing...\nPlease Load a file first.");
        return;
    }
    string baseName = fileName.Substring(0, fileName.Length - 6);
    string[] requiredDFS0File = File.ReadAllLines(baseName + ".txt");
    string[] availableDFS0 = File.ReadAllLines(baseName + "_xy.txt");
    foreach (string element in requiredDFS0File)
    {
        // The index of the matching available entry is the column of the
        // cached dfsData table for that series.
        for (int i = 0; i < availableDFS0.Length; i++)
        {
            if (element != availableDFS0[i])
            {
                continue;
            }
            string itemType = element.Substring(0, 2);
            if (itemType == "WL")
            {
                WriteDfs0Column(element, i, 2012, eumItem.eumIWaterLevel, eumUnit.eumUmeter);
            }
            else if (itemType == "Q,")
            {
                WriteDfs0Column(element, i, 2014, eumItem.eumIDischarge, eumUnit.eumUm3PerSec);
            }
        }
    }
    MessageBox.Show("Result file processed successfully.");
}

/// <summary>
/// Writes column <paramref name="column"/> of the cached dfsData table to a
/// dfs0 file named after <paramref name="element"/>. Extracted to remove the
/// duplicated WL/Q branches of the original handler.
/// </summary>
/// <param name="element">Series name; also used as file title and item name.</param>
/// <param name="column">Column index into dfsData for this series.</param>
/// <param name="applicationVersion">Application version stamped in the header.</param>
/// <param name="quantityItem">EUM item type of the series.</param>
/// <param name="quantityUnit">EUM unit of the series.</param>
private void WriteDfs0Column(string element, int column, int applicationVersion, eumItem quantityItem, eumUnit quantityUnit)
{
    DfsFactory factory = new DfsFactory();
    string filename = dfs0Path + @"\" + element + ".dfs0";
    DfsBuilder filecreator = DfsBuilder.Create(element, element, applicationVersion);
    filecreator.SetDataType(1);
    filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
    filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day, dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
    filecreator.SetItemStatisticsType(StatType.RegularStat);
    DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
    item.Set(element, eumQuantity.Create(quantityItem, quantityUnit), DfsSimpleType.Float);
    item.SetValueType(DataValueType.Instantaneous);
    item.SetAxis(factory.CreateAxisEqD0());
    item.SetReferenceCoordinates(1f, 2f, 3f);
    filecreator.AddDynamicItem(item.GetDynamicItemInfo());
    filecreator.CreateFile(filename);
    IDfsFile file = filecreator.GetFile();
    // Time stamps are seconds relative to the first cached date.
    for (int j = 0; j < dfsDate.Count; j++)
    {
        file.WriteItemTimeStepNext((dfsDate[j] - dfsDate[0]).TotalSeconds, new float[] { dfsData[j, column] });
    }
    file.Close();
}
/// <summary>
/// Loads a MIKE 11 HD result file (*.RES11): dumps its item table with the
/// external res11read tool, classifies each entry as water level ("WL") or
/// discharge ("Q"), fills comboBox1, and caches every node value per time
/// step in the dfsData table for later dfs0 export.
/// </summary>
private void btnLoadHD_Click(object sender, EventArgs e)
{
    try
    {
        label2.Visible = true;
        OpenFileDialog dialog = new OpenFileDialog();
        dialog.Filter = "Mike HD Result Files|*.RES11";
        if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
        {
            fileName = dialog.FileName;
        }
        // Run DHI's res11read tool ("-xy") to dump the item/chainage table
        // next to the result file.
        ProcessStartInfo start = new ProcessStartInfo();
        Process exeProcess = new Process();
        start.FileName = @"C:\Program Files\DHI\2014\bin\res11read.exe";
        start.Arguments = "-xy " + fileName + " " + fileName.Substring(0, fileName.Length - 6) + "_xy.txt";
        exeProcess = Process.Start(start);
        // NOTE(review): CreateNoWindow is set AFTER Process.Start, so it has
        // no effect on the already-started process.
        start.CreateNoWindow = true;
        exeProcess.WaitForExit();
        string[] riverChainageFile = File.ReadAllLines(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
        char[] charSeparators = new char[] { ' ' };
        StringBuilder sb = new StringBuilder();
        // Parse the fixed-width dump: rows 19 .. length-4, fields taken from
        // a 140-char slice starting at column 24. Type code "2" is treated
        // as a discharge point, "0"/"1" as water-level points - assumed from
        // the res11read output format; confirm against the tool's docs.
        for (int i = 19; i < riverChainageFile.Length - 3; i++)
        {
            var texts = riverChainageFile[i].Substring(24, 140).Split(charSeparators, StringSplitOptions.RemoveEmptyEntries);
            if (texts[2] == "2")
            {
                QItems.Add("Q," + texts[0] + "," + texts[1]);
            }
            else if (texts[2] == "0" || texts[2] == "1")
            {
                WLItems.Add("WL," + texts[0] + "," + texts[1]);
            }
        }
        // Fill the combo box (water levels first, then discharges) and
        // rewrite the _xy.txt file with the classified names.
        for (int i = 0; i < WLItems.Count; i++)
        {
            sb.AppendLine(WLItems[i]);
            comboBox1.Items.Add(WLItems[i]);
        }
        for (int i = 0; i < QItems.Count; i++)
        {
            sb.AppendLine(QItems[i]);
            comboBox1.Items.Add(QItems[i]);
        }
        File.Delete(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
        File.WriteAllText(fileName.Substring(0, fileName.Length - 6) + "_xy.txt", sb.ToString());
        // Open the result file itself and cache all values.
        // NOTE(review): resFile is never closed in this handler.
        IDfsFile resFile = DfsFileFactory.DfsGenericOpen(fileName);
        DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
        DateTime startDate = date[0];
        IDfsFileInfo resfileInfo = resFile.FileInfo;
        IDfsItemData<float> data;
        noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
        // Total element count across all items, shown as a diagnostic.
        int cx = 0;
        for (int j = 0; j < resFile.ItemInfo.Count; j++)
        {
            IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
            data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, 0);
            cx = cx + dynamicItemInfo.ElementCount;
        }
        MessageBox.Show(cx.ToString());
        // Absolute date of each time step; file times are added as hours.
        for (int i = 0; i < noTimeSteps; i++)
        {
            dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
        }
        // dfsData[timeStep, node]: flatten every item's elements per time
        // step, capped at the number of combo-box entries.
        for (int i = 0; i < noTimeSteps; i++)
        {
            int counter = 0;
            int totalNode = 0;
            for (int j = 0; j < resFile.ItemInfo.Count; j++)
            {
                IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                counter = dynamicItemInfo.ElementCount;
                for (int z = 0; z < counter; z++)
                {
                    if (totalNode < comboBox1.Items.Count)
                    {
                        dfsData[i, totalNode] = (Convert.ToSingle(data.Data[z]));
                        totalNode = totalNode + 1;
                    }
                    else
                    {
                        break;
                    }
                }
            }
        }
        // Output directory for later dfs0 exports = result file's directory.
        var filepath = fileName.Split('\\');
        dfs0Path = filepath[0];
        for (int i = 1; i < filepath.Length - 1; i++)
        {
            dfs0Path = dfs0Path + @"\" + filepath[i];
        }
        label2.Text = "Loaded successfully.";
    }
    catch (Exception error)
    {
        MessageBox.Show("File have not loaded. Error: " + error.Message);
    }
}
/// <summary>
/// Exports every "RunOff*" item of a user-selected MIKE NAM RES11 result
/// file to an individual dfs0 discharge file next to the source file.
/// </summary>
private void btnLoadNAM_Click(object sender, EventArgs e)
{
    OpenFileDialog dialog = new OpenFileDialog();
    dialog.Filter = "Mike NAM Result Files|*.RES11";
    if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
    {
        fileName = dialog.FileName;
    }

    // Output directory = directory of the selected result file.
    var filepath = fileName.Split('\\');
    dfs0Path = filepath[0];
    for (int i = 1; i < filepath.Length - 1; i++)
    {
        dfs0Path = dfs0Path + @"\" + filepath[i];
    }

    IDfsFile resFile = DfsFileFactory.DfsGenericOpenEdit(fileName);
    IDfsFileInfo resfileInfo = resFile.FileInfo;
    DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
    DateTime startDate = date[0];
    int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
    float[] values = new float[noTimeSteps];

    // Absolute date of every time step; file times are added as hours.
    for (int i = 0; i < noTimeSteps; i++)
    {
        dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
    }

    // Factory is stateless here - create once instead of once per item.
    DfsFactory factory = new DfsFactory();
    for (int j = 0; j < resFile.ItemInfo.Count; j++)
    {
        IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
        string nameOftDynamicItem = dynamicItemInfo.Name;
        // Fix: the original used Substring(0, 6), which throws for item
        // names shorter than six characters. StartsWith is safe and keeps
        // the same ordinal comparison.
        if (!nameOftDynamicItem.StartsWith("RunOff", StringComparison.Ordinal))
        {
            continue;
        }
        string filename = dfs0Path + @"\" + nameOftDynamicItem + ".dfs0";
        DfsBuilder filecreator = DfsBuilder.Create(nameOftDynamicItem, nameOftDynamicItem, 2014);
        filecreator.SetDataType(1);
        filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
        filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day, dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
        filecreator.SetItemStatisticsType(StatType.RegularStat);
        DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
        item.Set(nameOftDynamicItem, eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float);
        item.SetValueType(DataValueType.Instantaneous);
        item.SetAxis(factory.CreateAxisEqD0());
        item.SetReferenceCoordinates(1f, 2f, 3f);
        filecreator.AddDynamicItem(item.GetDynamicItemInfo());
        filecreator.CreateFile(filename);
        IDfsFile file = filecreator.GetFile();
        // Copy the item's series, time stamped relative to the first step.
        for (int i = 0; i < noTimeSteps; i++)
        {
            var data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
            values[i] = Convert.ToSingle(data.Data[0]);
            file.WriteItemTimeStepNext((dfsDate[i] - dfsDate[0]).TotalSeconds, new float[] { values[i] });
        }
        file.Close();
    }
    resFile.Close(); // fix: the source file handle was never released
}
/// <summary>
/// Exports the series currently selected in comboBox1 to a dfs0 file:
/// "WL" entries as water level, "Q," entries as discharge. Requires a
/// result file to have been loaded first (dfsDate and dfsData populated).
/// </summary>
private void btnSingleDFS0_Click(object sender, EventArgs e)
{
    try
    {
        if (fileName == "")
        {
            MessageBox.Show("No files have been selected for processing...\nPlease Load a file first.");
            return;
        }
        string selected = comboBox1.SelectedItem.ToString();
        string itemType = selected.Substring(0, 2);
        if (itemType == "WL")
        {
            // WL entries drop their last four characters for the output
            // name (original behavior, preserved).
            string element = selected.Substring(0, selected.Length - 4);
            WriteSelectedSeries(element, 2012, eumItem.eumIWaterLevel, eumUnit.eumUmeter);
        }
        else if (itemType == "Q,")
        {
            WriteSelectedSeries(selected, 2014, eumItem.eumIDischarge, eumUnit.eumUm3PerSec);
        }
        MessageBox.Show("Result file processed successfully.");
    }
    catch (Exception error)
    {
        MessageBox.Show("HD Model Result files cannot be processed due to an error. Error: " + error.Message);
    }
}

/// <summary>
/// Writes the comboBox1-selected column of dfsData to a dfs0 file named
/// after <paramref name="element"/>. Extracted to remove the duplicated
/// WL/Q branches of the original handler.
/// </summary>
/// <param name="element">Series name; also used as file title and item name.</param>
/// <param name="applicationVersion">Application version stamped in the header.</param>
/// <param name="quantityItem">EUM item type of the series.</param>
/// <param name="quantityUnit">EUM unit of the series.</param>
private void WriteSelectedSeries(string element, int applicationVersion, eumItem quantityItem, eumUnit quantityUnit)
{
    DfsFactory factory = new DfsFactory();
    string filename = dfs0Path + @"\" + element + ".dfs0";
    DfsBuilder filecreator = DfsBuilder.Create(element, element, applicationVersion);
    filecreator.SetDataType(1);
    filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
    filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day, dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
    filecreator.SetItemStatisticsType(StatType.RegularStat);
    DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
    item.Set(element, eumQuantity.Create(quantityItem, quantityUnit), DfsSimpleType.Float);
    item.SetValueType(DataValueType.Instantaneous);
    item.SetAxis(factory.CreateAxisEqD0());
    item.SetReferenceCoordinates(1f, 2f, 3f);
    filecreator.AddDynamicItem(item.GetDynamicItemInfo());
    filecreator.CreateFile(filename);
    IDfsFile file = filecreator.GetFile();
    // Time stamps are seconds relative to the first cached date.
    for (int j = 0; j < dfsDate.Count; j++)
    {
        file.WriteItemTimeStepNext((dfsDate[j] - dfsDate[0]).TotalSeconds, new float[] { dfsData[j, comboBox1.SelectedIndex] });
    }
    file.Close();
}
/// <summary>
/// Console tool: reads a user-selected MIKE 11 HD result file (*.RES11) and
/// dumps all water-level and discharge values to two CSV files
/// ("&lt;name&gt;_WL.csv" and "&lt;name&gt;_Q.csv") next to the input file.
/// </summary>
static void Main(string[] args)
{
    try
    {
        StringBuilder sb = new StringBuilder();
        OpenFileDialog dialog = new OpenFileDialog();
        dialog.Filter = "Mike HD Result Files|*.RES11";
        if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
        {
            IDfsFile resFile = DfsFileFactory.DfsGenericOpen(dialog.FileName);
            DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
            DateTime startDate = date[0];
            IDfsFileInfo resfileInfo = resFile.FileInfo;
            IDfsItemData<float> data;
            int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
            DateTime[] dfsDate = new DateTime[noTimeSteps];
            List<float> dfsWLData = new List<float>();
            List<float> dfsQData = new List<float>();
            // Absolute date of each time step; file times are added as hours.
            for (int i = 0; i < noTimeSteps; i++)
            {
                dfsDate[i] = startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time);
            }
            int totalWNode = 0;
            int totalQNode = 0;
            // Collect all values time-major: for each step, append every
            // element of every "Water Level" item to dfsWLData and every
            // "Discharge" item to dfsQData. The per-step node counts are
            // captured so the flat lists can be re-indexed below.
            for (int i = 0; i < noTimeSteps; i++)
            {
                int Wcounter = 0;
                int nodeWCount = 0;
                int Qcounter = 0;
                int nodeQCount = 0;
                for (int j = 0; j < resFile.ItemInfo.Count; j++)
                {
                    IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                    string nameOftDynamicItem = dynamicItemInfo.Name;
                    // NOTE(review): Substring(0, 11) / Substring(0, 9) throw
                    // ArgumentOutOfRangeException for item names shorter than
                    // 11 / 9 characters - the surrounding catch masks this.
                    string WLname = nameOftDynamicItem.Substring(0, 11);
                    string Qname = nameOftDynamicItem.Substring(0, 9);
                    if (WLname == "Water Level")
                    {
                        Wcounter = dynamicItemInfo.ElementCount;
                        data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                        for (int z = 0; z < Wcounter; z++)
                        {
                            dfsWLData.Add(Convert.ToSingle(data.Data[z]));
                            nodeWCount = nodeWCount + 1;
                        }
                    }
                    else if (Qname == "Discharge")
                    {
                        Qcounter = dynamicItemInfo.ElementCount;
                        data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                        for (int z = 0; z < Qcounter; z++)
                        {
                            dfsQData.Add(Convert.ToSingle(data.Data[z]));
                            nodeQCount = nodeQCount + 1;
                        }
                    }
                }
                Console.WriteLine(i);
                totalWNode = nodeWCount;
                totalQNode = nodeQCount;
            }
            // Emit "date,node,value" rows; the flat lists are indexed as
            // step * nodesPerStep + node, which assumes every step saw the
            // same node count (the loop above keeps only the last counts).
            for (int i = 0; i < noTimeSteps; i++)
            {
                for (int j = 0; j < totalWNode; j++)
                {
                    sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsWLData[i * totalWNode + j]);
                }
                File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_WL.csv", sb.ToString());
                sb.Clear();
            }
            for (int i = 0; i < noTimeSteps; i++)
            {
                for (int j = 0; j < totalQNode; j++)
                {
                    sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsQData[i * totalQNode + j]);
                }
                File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_Q.csv", sb.ToString());
                sb.Clear();
            }
            Console.WriteLine("Result file processed suceesssfully.");
            Console.WriteLine("Press any key to exit...");
            Console.ReadKey();
        }
    }
    catch (Exception error)
    {
        Console.WriteLine("HD Model Result files cannot be processed due to an error. Error: " + error.Message);
        Console.WriteLine("Press any key to exit...");
        Console.ReadKey();
    }
}
/// <summary>
/// Example of how to copy a Dfs file.
/// <para>
/// This example is intended to show how to generically copy a file. In
/// case a copy with modified data is required, this could be used as a base
/// for the copy.
/// </para>
/// </summary>
/// <param name="sourceFilename">Path and name of the source dfs file</param>
/// <param name="filename">Path and name of the new file to create</param>
public static void CopyDfsFile(string sourceFilename, string filename)
{
    IDfsFile input = DfsFileFactory.DfsGenericOpen(sourceFilename);
    IDfsFileInfo header = input.FileInfo;
    DfsBuilder copy = DfsBuilder.Create(header.FileTitle, header.ApplicationTitle, header.ApplicationVersion);

    // Mirror the header settings of the source file.
    copy.SetDataType(header.DataType);
    copy.SetGeographicalProjection(header.Projection);
    copy.SetTemporalAxis(header.TimeAxis);
    copy.SetItemStatisticsType(header.StatsType);
    copy.DeleteValueByte = header.DeleteValueByte;
    copy.DeleteValueDouble = header.DeleteValueDouble;
    copy.DeleteValueFloat = header.DeleteValueFloat;
    copy.DeleteValueInt = header.DeleteValueInt;
    copy.DeleteValueUnsignedInt = header.DeleteValueUnsignedInt;

    // Encoding keys only exist for compressed files.
    if (header.IsFileCompressed)
    {
        int[] keyX;
        int[] keyY;
        int[] keyZ;
        header.GetEncodeKey(out keyX, out keyY, out keyZ);
        copy.SetEncodingKey(keyX, keyY, keyZ);
    }

    // Custom blocks and dynamic item definitions are copied verbatim.
    foreach (IDfsCustomBlock block in header.CustomBlocks)
    {
        copy.AddCustomBlock(block);
    }
    foreach (var item in input.ItemInfo)
    {
        copy.AddDynamicItem(item);
    }

    copy.CreateFile(filename);

    // Static items are transferred after the file has been created.
    for (IDfsStaticItem s = input.ReadStaticItemNext(); s != null; s = input.ReadStaticItemNext())
    {
        copy.AddStaticItem(s);
    }

    DfsFile target = copy.GetFile();

    // Stream every dynamic time step straight from source to target.
    for (IDfsItemData d = input.ReadItemTimeStepNext(); d != null; d = input.ReadItemTimeStepNext())
    {
        target.WriteItemTimeStepNext(d.Time, d.Data);
    }

    input.Close();
    target.Close();
}
//private readonly List<IExchangeItem> _exchangeItems;
//private readonly List<TimeSerie> _timeSeries;

/// <summary>
/// DFS0 reader. Gets information from the dfs file, and reads data.
/// Validates the file exists and has a .dfs0 extension, caches item IDs,
/// per-item XY/layer coordinates parsed from the item names ("x,y,layer"),
/// and the time stamps of all steps.
/// </summary>
/// <param name="dfsfile">full path string to dfs0 file.</param>
public Dfs0Reader(string dfsfile) : base(dfsfile)
{
    // Set ObservationFile
    // NOTE(review): the second FileNotFoundException argument is the
    // fileName parameter, not a format argument - the "{0}" placeholder in
    // the message is never substituted.
    if (!File.Exists(dfsfile))
    {
        throw new FileNotFoundException("\n ERROR: DFS File Not Found! \n Could not find: {0} \n", dfsfile);
    }
    // Determine Type: only .dfs0 (case-insensitive) is accepted.
    string fileExtension = Path.GetExtension(dfsfile);
    if (System.String.Compare(fileExtension, ".dfs0", System.StringComparison.OrdinalIgnoreCase) == 0)
    {
        // NOTE(review): redundant reassignment - fileExtension already holds
        // this value.
        fileExtension = Path.GetExtension(dfsfile);
    }
    else
    {
        throw new Exception("\n ERROR: Observation File Type Incorrect! Expecting dfs0. \n \n");
    }
    // Open the file as a generic dfs file
    _dfs0File = DfsFileFactory.DfsGenericOpen(dfsfile);
    // Header information is contained in the IDfsFileInfo
    IDfsFileInfo fileInfo = _dfs0File.FileInfo;
    // Check for dfs compliance
    CheckDFSCompliance();
    // Number of time steps (same for all items)
    _numTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;
    // Starting from...
    // (local deliberately shadows field-style naming - rename candidate)
    int _firstTimeStepIndex = fileInfo.TimeAxis.FirstTimeStepIndex;
    // Number of variable items in dfs0
    _numItems = _dfs0File.ItemInfo.Count;
    // Add the IDs to list (Keys); item names are expected to be formatted
    // as "x,y,layer" - parsing throws otherwise.
    _itemIDs = new List<string>();
    _quantities = new List<string>();
    _xyLayerPoints = new List<IXYLayerPoint>();
    foreach (var itemInfo in _dfs0File.ItemInfo)
    {
        String name = itemInfo.Name;
        var coords = name.Split(',');
        double x = Convert.ToDouble(coords[0]);
        double y = Convert.ToDouble(coords[1]);
        int zLayer = Convert.ToInt32(coords[2]);
        _quantities.Add(_dfs0File.FileInfo.FileTitle);
        _itemIDs.Add(name);
        _xyLayerPoints.Add(new XYLayerPoint(x, y, zLayer));
    }
    // Gather all times
    // NOTE(review): the list read from the time axis is immediately
    // discarded and replaced by _timesteps on the next line - this looks
    // unintentional; confirm which source of truth is wanted.
    _times = _dfs0File.FileInfo.TimeAxis.GetDateTimes().ToList();
    _times = _timesteps;
    DateTime firstTime = _times[0];
    if (_dfs0File.FileInfo.TimeAxis.TimeAxisType != TimeAxisType.CalendarEquidistant)
    {
        // Handle pseudo irregular files: verify every item reports (nearly)
        // the same offset per step, then rebase _times on those offsets.
        double[] dates = new double[_numTimeSteps]; // just make 1 bigger for easy indexing
        for (int iTimeStep = _firstTimeStepIndex; iTimeStep < _numTimeSteps; iTimeStep++)
        {
            for (int iItem = 1; iItem < _numItems + 1; iItem++)
            {
                IDfsItemData data1 = _dfs0File.ReadItemTimeStep(iItem, iTimeStep);
                double offsetTime = data1.Time;
                if (iItem == 1)
                {
                    dates[iTimeStep] = offsetTime;
                }
                else
                {
                    // Items disagreeing by more than 1.0 (file time units)
                    // make the file unusable as a regular series.
                    if (Math.Abs(offsetTime - dates[iTimeStep]) > 1.0)
                    {
                        throw new Exception("Non Equidistant Calander is not regular");
                    }
                }
            }
            if (iTimeStep > 0)
            {
                // NOTE(review): offsets are applied as seconds - confirm the
                // file's time unit actually is seconds.
                _times[iTimeStep] = _times[0].AddSeconds(dates[iTimeStep]);
            }
        }
    }
    IList<IDfsDynamicItemInfo> infoAllTimes = _dfs0File.ItemInfo;
    String TimeSeriesName = infoAllTimes[0].Name;
    // Delelte Values
    _deleteValueDouble = _dfs0File.FileInfo.DeleteValueDouble;
    _deleteValueFloat = _dfs0File.FileInfo.DeleteValueFloat;
}
/// <summary> /// Read Time series data DFS0 to FEWS PI memory data structure /// </summary> /// <param name="pi">Memory data structure where content of the DFS0 file will be /// add</param> /// <param name="rootPath">Root directory where dfs0 files are placed</param> /// <param name="relativePath">Full file path relative to rootPath</param> /// <param name="ensembleId">Ensemble Id identifying where to put data from /// file</param> /// <param name="ensembleMemberId">Ensemble member Id identifying where to put data /// from file</param> /// <param name="ensembleMemberIndex">Ensemble member index</param> public bool ReadDfs0File(ref PI pi, string rootPath, string relativePath, string ensembleId, string ensembleMemberId, int ensembleMemberIndex) { var dfs0File = DfsFileFactory.DfsGenericOpen(Path.Combine(rootPath, relativePath)); IDfsFileInfo fileInfo = dfs0File.FileInfo; int numberOfTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps; DateTime start = DateTime.MinValue; DateTime end = DateTime.MinValue; if (fileInfo.TimeAxis.IsCalendar()) { if (dfs0File.FileInfo.TimeAxis.IsEquidistant()) { start = (dfs0File.FileInfo.TimeAxis as IDfsEqCalendarAxis).StartDateTime; } else { start = (dfs0File.FileInfo.TimeAxis as IDfsNonEqCalendarAxis).StartDateTime; } } for (int itemIndex = 0; itemIndex < dfs0File.ItemInfo.Count; itemIndex++) { var ts = new TimeSeries(); switch (dfs0File.ItemInfo[itemIndex].ValueType = DataValueType.Instantaneous) { case DataValueType.Instantaneous: ts.Type = "instantaneous"; break; default: ts.Type = "instantaneous"; break; } ts.X = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateX; ts.Y = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateY; ts.Z = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateZ; ts.LocationId = relativePath; if (!string.IsNullOrEmpty(ensembleId)) { ts.EnsembleId = ensembleId; ts.EnsembleMemberId = ensembleMemberId; ts.EnsembleMemberIndex = ensembleMemberIndex; } if (dfs0File.ItemInfo.Count > 1) { ts.LocationId = 
$"{ts.LocationId}|{dfs0File.ItemInfo[itemIndex].Name}"; } ts.TimeStep = new TimeSpan(1, 0, 0); ts.StationName = ts.LocationId; ts.ParameterId = dfs0File.ItemInfo[itemIndex].Quantity.Item.ToString() + ";" + dfs0File.ItemInfo[itemIndex].Quantity.Unit.ToString(); ts.MissVal = -999999.9; ts.StartDate = start; ts.EndDate = start; ts.Units = ""; var deleteVal = dfs0File.FileInfo.DeleteValueFloat; DateTime step1 = DateTime.MinValue; DateTime step2 = DateTime.MinValue; for (int timeStepIndex = 0; timeStepIndex < fileInfo.TimeAxis.NumberOfTimeSteps; timeStepIndex++) { double value = ts.MissVal.Value; var values = dfs0File.ReadItemTimeStep(itemIndex + 1, timeStepIndex); float fvalue = (float)(values.Data.GetValue(0)); if (Math.Abs(fvalue - deleteVal) > float.Epsilon) { value = fvalue; } var time = values.TimeAsDateTime(dfs0File.FileInfo.TimeAxis); ts.Values.Add(time, new TSValue(value)); ts.EndDate = time; if (step1 == DateTime.MinValue) { step1 = time; } else if (step2 == DateTime.MinValue) { ts.TimeStep = time - step1; step2 = time; } } pi.TimeSeries.Add(ts); } return(true); }
/// <summary> /// Create a new file, being the difference of two files. /// <para> /// The two input files must be equal in structure, e.g. coming /// from the same simulation but giving different results. /// Header and static data must be identical, only difference /// must be in values of the dynamic data. /// </para> /// </summary> public static void CreateDiffFile(string file1, string file2, string filediff = null) { IDfsFile dfs1 = DfsFileFactory.DfsGenericOpen(file1); IDfsFile dfs2 = DfsFileFactory.DfsGenericOpen(file2); // Validate that it has the same number of items. if (dfs1.ItemInfo.Count != dfs2.ItemInfo.Count) { throw new Exception("Number of dynamic items does not match"); } int numItems = dfs1.ItemInfo.Count; // In case number of time steps does not match, take the smallest. int numTimes = dfs1.FileInfo.TimeAxis.NumberOfTimeSteps; if (numTimes > dfs2.FileInfo.TimeAxis.NumberOfTimeSteps) { numTimes = dfs2.FileInfo.TimeAxis.NumberOfTimeSteps; Console.Out.WriteLine("Number of time steps does not match, using the smallest number"); } // For recording max difference for every item double[] maxDiff = new double[dfs1.ItemInfo.Count]; // Index in time (index) of maximum and first difference. -1 if no difference int[] maxDiffTime = new int[dfs1.ItemInfo.Count]; int[] firstDiffTime = new int[dfs1.ItemInfo.Count]; for (int i = 0; i < dfs1.ItemInfo.Count; i++) { maxDiffTime[i] = -1; firstDiffTime[i] = -1; } // Copy over info from the first file, assuming the second file contains the same data. 
IDfsFileInfo fileInfo = dfs1.FileInfo; DfsBuilder builder = null; if (!string.IsNullOrEmpty(filediff)) { builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion); // Set up the header builder.SetDataType(fileInfo.DataType); builder.SetGeographicalProjection(fileInfo.Projection); builder.SetTemporalAxis(fileInfo.TimeAxis); builder.SetItemStatisticsType(fileInfo.StatsType); builder.DeleteValueByte = fileInfo.DeleteValueByte; builder.DeleteValueDouble = fileInfo.DeleteValueDouble; builder.DeleteValueFloat = fileInfo.DeleteValueFloat; builder.DeleteValueInt = fileInfo.DeleteValueInt; builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt; // Transfer compression keys. if (fileInfo.IsFileCompressed) { int[] xkey; int[] ykey; int[] zkey; fileInfo.GetEncodeKey(out xkey, out ykey, out zkey); builder.SetEncodingKey(xkey, ykey, zkey); } // Copy custom blocks foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks) { builder.AddCustomBlock(customBlock); } } // Copy dynamic item definitions bool[] floatItems = new bool[dfs1.ItemInfo.Count]; for (int i = 0; i < dfs1.ItemInfo.Count; i++) { var itemInfo = dfs1.ItemInfo[i]; // Validate item sizes var itemInfo2 = dfs2.ItemInfo[i]; if (itemInfo.ElementCount != itemInfo2.ElementCount) { throw new Exception("Dynamic items must have same size, item number " + (i + 1) + " has different sizes in the two files"); } // Validate the data type, only supporting floats and doubles. 
if (itemInfo.DataType == DfsSimpleType.Float) { floatItems[i] = true; } else if (itemInfo.DataType != DfsSimpleType.Double) { throw new Exception("Dynamic item must be double or float, item number " + (i + 1) + " is of type " + (itemInfo.DataType)); } builder?.AddDynamicItem(itemInfo); } // Create file builder?.CreateFile(filediff); if (builder != null) { // Copy over static items from file 1, assuming the static items of file 2 are identical IDfsStaticItem si1; while (null != (si1 = dfs1.ReadStaticItemNext())) { builder.AddStaticItem(si1); } } // Get the file DfsFile diff = builder?.GetFile(); // Write dynamic data to the file, being the difference between the two for (int i = 0; i < numTimes; i++) { for (int j = 0; j < numItems; j++) { if (floatItems[j]) { IDfsItemData <float> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData <float>; IDfsItemData <float> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData <float>; for (int k = 0; k < data1.Data.Length; k++) { float valuediff = data1.Data[k] - data2.Data[k]; data1.Data[k] = valuediff; float absValueDiff = Math.Abs(valuediff); if (absValueDiff > maxDiff[j]) { maxDiff[j] = absValueDiff; maxDiffTime[j] = i; if (firstDiffTime[j] == -1) { firstDiffTime[j] = i; } } } diff?.WriteItemTimeStepNext(data1.Time, data1.Data); } else { IDfsItemData <double> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData <double>; IDfsItemData <double> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData <double>; for (int k = 0; k < data1.Data.Length; k++) { double valuediff = data1.Data[k] - data2.Data[k]; data1.Data[k] = valuediff; double absValueDiff = Math.Abs(valuediff); if (absValueDiff > maxDiff[j]) { maxDiff[j] = absValueDiff; maxDiffTime[j] = i; if (firstDiffTime[j] == -1) { firstDiffTime[j] = i; } } } diff?.WriteItemTimeStepNext(data1.Time, data1.Data); } } } System.Console.WriteLine("Difference statistics:"); for (int i = 0; i < maxDiffTime.Length; i++) { if (maxDiffTime[i] < 0) { Console.WriteLine("{0,-30}: no difference", 
dfs1.ItemInfo[i].Name); } else { Console.WriteLine("{0,-30}: Max difference at timestep {1,3}: {2}. First difference at timestep {3}", dfs1.ItemInfo[i].Name, maxDiffTime[i], maxDiff[i], firstDiffTime[i]); } } dfs1.Close(); dfs2.Close(); diff?.Close(); }
public static void CreateHeader(IDfsFileInfo fi, DfsBuilder builder) { CreateHeader(fi, builder, fi.TimeAxis); }
/// <summary> /// Example of how to merge two or more dfs files. The merger is on dynamic item basis, /// i.e. add all dynamic items of a number of dfs files to a new dfs file. /// <para> /// It is assumed that all files has the same time stepping layout. It will merge /// as many time steps as the file with the least number of timesteps. /// </para> /// <para> /// If merging one of the specific types of dfs files, dfs0 or dfs1 or dfs2 or dfs3, /// the structure of the files must be identical, i.e. the sizes of the axis must equal. /// Otherwise, the outcome will not be a valid dfs0/1/2/3 file. /// </para> /// </summary> /// <param name="targetFilename">Path and name of the new file to create</param> /// <param name="sourcesFilenames">Path and name of the source dfs files</param> public static void MergeDfsFileItems(string targetFilename, IList <string> sourcesFilenames) { // List of sources to be merged - in case of more than one, just extend this. List <IDfsFile> sources = new List <IDfsFile>(); for (int i = 0; i < sourcesFilenames.Count; i++) { sources.Add(DfsFileFactory.DfsGenericOpen(sourcesFilenames[i])); } // Use the first file as skeleton for header and static items. IDfsFile source = sources[0]; IDfsFileInfo fileInfo = source.FileInfo; DfsBuilder builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion); // Set up the header builder.SetDataType(fileInfo.DataType); builder.SetGeographicalProjection(fileInfo.Projection); builder.SetTemporalAxis(fileInfo.TimeAxis); builder.SetItemStatisticsType(fileInfo.StatsType); builder.DeleteValueByte = fileInfo.DeleteValueByte; builder.DeleteValueDouble = fileInfo.DeleteValueDouble; builder.DeleteValueFloat = fileInfo.DeleteValueFloat; builder.DeleteValueInt = fileInfo.DeleteValueInt; builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt; // Transfer compression keys - if any. 
if (fileInfo.IsFileCompressed) { int[] xkey; int[] ykey; int[] zkey; fileInfo.GetEncodeKey(out xkey, out ykey, out zkey); builder.SetEncodingKey(xkey, ykey, zkey); } // Copy custom blocks - if any foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks) { builder.AddCustomBlock(customBlock); } int minNumTimesteps = int.MaxValue; // Copy dynamic items for all source files for (int j = 0; j < sources.Count; j++) { if (sources[j].FileInfo.TimeAxis.NumberOfTimeSteps < minNumTimesteps) { minNumTimesteps = sources[j].FileInfo.TimeAxis.NumberOfTimeSteps; } foreach (var itemInfo in sources[j].ItemInfo) { builder.AddDynamicItem(itemInfo); } } // Create file builder.CreateFile(targetFilename); // Copy static items - add only from main file IDfsStaticItem sourceStaticItem; while (null != (sourceStaticItem = source.ReadStaticItemNext())) { builder.AddStaticItem(sourceStaticItem); } // Get the file DfsFile file = builder.GetFile(); // Copy dynamic item data IDfsItemData sourceData; for (int i = 0; i < minNumTimesteps; i++) { for (int j = 0; j < sources.Count; j++) { IDfsFile sourcej = sources[j]; // Copy all items for this source for (int k = 0; k < sourcej.ItemInfo.Count; k++) { sourceData = sourcej.ReadItemTimeStepNext(); file.WriteItemTimeStepNext(sourceData.Time, sourceData.Data); } } } foreach (IDfsFile sourcej in sources) { sourcej.Close(); } file.Close(); }