/// <summary>
/// Reads a single value for the given item at the given time step and
/// converts it to a double. Returns null when the stored value equals the
/// file's delete value, or when the item is neither float nor double.
/// </summary>
private double? GetDataValue(int itemNum, int timeIdx)
{
    DfsSimpleType itemType = _dfs0File.ItemInfo[itemNum].DataType;

    if (itemType == DfsSimpleType.Float)
    {
        var itemData = (IDfsItemData<float>)_dfs0File.ReadItemTimeStep(itemNum + 1, timeIdx);
        float raw = itemData.Data[0];
        // ReSharper disable once CompareOfFloatsByEqualityOperator
        if (raw != _deleteValueFloat)
        {
            return Convert.ToDouble(raw);
        }
    }
    else if (itemType == DfsSimpleType.Double)
    {
        var itemData = (IDfsItemData<double>)_dfs0File.ReadItemTimeStep(itemNum + 1, timeIdx);
        // ReSharper disable once CompareOfFloatsByEqualityOperator
        if (itemData.Data[0] != _deleteValueDouble)
        {
            return itemData.Data[0];
        }
    }

    // if values are not real, return null.
    return null;
}
/// <summary>
/// Extracts the simulated discharge series of a fixed set of node item
/// numbers from a MIKE HYDRO result dfs0 file and writes each series to its
/// own single-item dfs0 boundary file.
/// </summary>
static void Main(string[] args)
{
    int[] nodeNumber = new int[] { 899, 2686, 2856, 2866, 2331, 3806, 2231, 3831 };

    IDfsFile resFile = DfsFileFactory.DfsGenericOpenEdit(@"E:\FFWS\Model\MIKEHYDRO\GBM_MIKEHYDRO.mhydro - Result Files\RiverBasin_GBM.dfs0");
    try
    {
        IDfsFileInfo resfileInfo = resFile.FileInfo;
        int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
        DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
        DateTime startDate = date[0];

        // Relative time of every step, read once from item 899 (also the
        // first node item in nodeNumber).
        double[] timeSpan = new double[noTimeSteps];
        for (int j = 0; j < noTimeSteps; j++)
        {
            timeSpan[j] = resFile.ReadItemTimeStep(899, j).Time;
        }

        // The factory holds no per-node state; create it once, not per node.
        DfsFactory factory = new DfsFactory();

        foreach (int element in nodeNumber)
        {
            // Read the full series for this node's item number.
            float[] QSimvalues = new float[noTimeSteps];
            for (int j = 0; j < noTimeSteps; j++)
            {
                var data = (IDfsItemData<float>)resFile.ReadItemTimeStep(element, j);
                QSimvalues[j] = Convert.ToSingle(data.Data[0]);
            }

            string filename = @"E:\FFWS\Model\BrahmaputraHD\Boundary\" + element + ".dfs0";

            // Build a single-item, non-equidistant calendar-axis dfs0 file.
            DfsBuilder filecreator = DfsBuilder.Create(element.ToString(), element.ToString(), 2014);
            filecreator.SetDataType(1);
            filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
            filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(
                eumUnit.eumUsec,
                new DateTime(startDate.Year, startDate.Month, startDate.Day,
                             startDate.Hour, startDate.Minute, startDate.Second)));
            filecreator.SetItemStatisticsType(StatType.RegularStat);

            DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
            item.Set(element.ToString(),
                     eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec),
                     DfsSimpleType.Float);
            item.SetValueType(DataValueType.Instantaneous);
            item.SetAxis(factory.CreateAxisEqD0());
            item.SetReferenceCoordinates(1f, 2f, 3f);
            filecreator.AddDynamicItem(item.GetDynamicItemInfo());

            filecreator.CreateFile(filename);
            IDfsFile file = filecreator.GetFile();

            // Write the series, one value per step, using the source times.
            for (int j = 0; j < noTimeSteps; j++)
            {
                file.WriteItemTimeStepNext(timeSpan[j], new float[] { QSimvalues[j] });
            }
            file.Close();
        }
    }
    finally
    {
        // BUG FIX: the source result file was never closed in the original.
        resFile.Close();
    }
}
/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// First column in the result are the times, then a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
    int itemCount = dfs0File.ItemInfo.Count;
    // Hoisted: the time axis is loop-invariant; the original re-walked
    // dfs0File.FileInfo.TimeAxis on every row.
    var timeAxis = dfs0File.FileInfo.TimeAxis;
    int timestepCount = timeAxis.NumberOfTimeSteps;
    double[,] res = new double[timestepCount, itemCount + 1];

    // Preload a set of item data (item numbers are 1-based)
    IDfsItemData[] itemDatas = new IDfsItemData[itemCount];
    for (int j = 0; j < itemCount; j++)
    {
        itemDatas[j] = dfs0File.CreateEmptyItemData(j + 1);
    }
    dfs0File.Reset();

    for (int i = 0; i < timestepCount; i++)
    {
        for (int j = 0; j < itemCount; j++)
        {
            IDfsItemData itemData = itemDatas[j];
            dfs0File.ReadItemTimeStep(itemData, i);
            // First column is time, remaining columns are data
            if (j == 0)
            {
                res[i, 0] = itemData.TimeInSeconds(timeAxis);
            }
            res[i, j + 1] = Convert.ToDouble(itemData.Data.GetValue(0));
        }
    }
    return res;
}
/// <summary>
/// Update time series with a constant change factor, adding 10% to all values
/// </summary>
/// <param name="dfs0File">Path and name of file, e.g. Rain_instantaneous.dfs0 test file</param>
/// <param name="dfs0FileNew">Name of new updated file</param>
public static void UpdateDfs0Data(string dfs0File, string dfs0FileNew)
{
    // Open source file
    IDfsFile source = DfsFileFactory.DfsGenericOpen(dfs0File);

    // New file carries the source header data under an updated title
    DfsBuilder builder = DfsBuilder.Create(source.FileInfo.FileTitle + "Updated", "MIKE SDK", 13);
    builder.SetDataType(source.FileInfo.DataType);
    builder.SetGeographicalProjection(source.FileInfo.Projection);
    builder.SetTemporalAxis(source.FileInfo.TimeAxis);

    // Only the first item of the source file is carried over
    builder.AddDynamicItem(source.ItemInfo[0]);

    // Create the new file
    builder.CreateFile(dfs0FileNew);
    IDfsFile target = builder.GetFile();

    // Copy every time step of item 1, scaling each value by 1.1 (+10%)
    int stepCount = source.FileInfo.TimeAxis.NumberOfTimeSteps;
    for (int step = 0; step < stepCount; step++)
    {
        var itemData = (IDfsItemData<double>)source.ReadItemTimeStep(1, step);
        target.WriteItemTimeStepNext(itemData.Time, new double[] { itemData.Data[0] * 1.1 });
    }

    source.Close();
    target.Close();
}
/// <summary>
/// Bulk read the times and data for a dfs0 file into a matrix.
/// <para>
/// Column 0 holds the time of each step; column (k+1) holds the value of
/// item k. One row per time step. All item values are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
    int nItems = dfs0File.ItemInfo.Count;
    int nSteps = dfs0File.FileInfo.TimeAxis.NumberOfTimeSteps;
    var matrix = new double[nSteps, nItems + 1];

    // One reusable item-data buffer per item (item numbers are 1-based).
    var buffers = new IDfsItemData[nItems];
    for (int item = 0; item < nItems; item++)
    {
        buffers[item] = dfs0File.CreateEmptyItemData(item + 1);
    }
    dfs0File.Reset();

    for (int step = 0; step < nSteps; step++)
    {
        for (int item = 0; item < nItems; item++)
        {
            IDfsItemData buffer = buffers[item];
            dfs0File.ReadItemTimeStep(buffer, step);
            // The time column is filled from the first item only.
            if (item == 0)
            {
                matrix[step, 0] = buffer.TimeInSeconds(dfs0File.FileInfo.TimeAxis);
            }
            matrix[step, item + 1] = Convert.ToDouble(buffer.Data.GetValue(0));
        }
    }
    return matrix;
}
/// <summary>
/// Computes the per-item time-average over all steps of the input file and
/// writes that same averaged array to the output file: first once at step 0
/// via WriteItemTimeStepNext, then for every step (including 0 again) via
/// WriteItemTimeStep using each step's native time value.
/// </summary>
private void ProcessAllTimeSteps(IDfsFile outputDfs)
{
    var nTimes = _dfsInput.FileInfo.TimeAxis.NumberOfTimeSteps;
    var nItems = _dfsInput.ItemInfo.Count;
    List<float[]> outdatalist = new List<float[]>();
    int timestep0 = 0;
    // Seed the per-item accumulators with the data of time step 0.
    for (int item = 1; item <= nItems; ++item)
    {
        var indatatime = _dfsInput.ReadItemTimeStep(item, timestep0);
        var indata = (float[])indatatime.Data;
        outdatalist.Add(indata);
    }
    // from step 1 and onwards
    for (int timestep = 1; timestep < nTimes; timestep++)
    {
        for (int item = 1; item <= nItems; ++item)
        {
            var indatatime = _dfsInput.ReadItemTimeStep(item, timestep);
            var indata = (float[])indatatime.Data;
            // sum data (element-wise; Zip allocates a fresh array each step)
            outdatalist[item - 1] = outdatalist[item - 1].Zip(indata, (x, y) => x + y).ToArray();
        }
    }
    // Divide the sums by the step count, then write the averages at step 0.
    // NOTE(review): step 0 is written again by the loop below via
    // WriteItemTimeStep — confirm the double write is intended.
    for (int item = 1; item <= nItems; ++item)
    {
        outdatalist[item - 1] = outdatalist[item - 1].Select(x => x / Convert.ToSingle(nTimes)).ToArray();
        outputDfs.WriteItemTimeStepNext(timestep0, outdatalist[item - 1]);
    }
    // write all steps (the same)
    for (int timestep = 0; timestep < nTimes; timestep++)
    {
        for (int item = 1; item <= nItems; ++item)
        {
            // Re-read only to obtain the native time value of this step.
            var indatatime = _dfsInput.ReadItemTimeStep(item, timestep);
            //outputDfs.WriteItemTimeStepNext(timestep, outdatalist[item - 1]); // indatatime.Time
            outputDfs.WriteItemTimeStep(item, timestep, indatatime.Time, outdatalist[item - 1]); // indatatime.Time
        }
    }
}
/// <summary>
/// Opens a RES11 result file, rebuilds the time-step dates from the file's
/// time axis, and creates a Res11Point for every point found in the file's
/// static data, alternating water level / discharge types.
/// </summary>
/// <param name="Res11FileName">Path to the .res11 file to open.</param>
public Res11(string Res11FileName)
{
    AbsoluteFileName = System.IO.Path.GetFullPath(Res11FileName);
    df = DHI.Generic.MikeZero.DFS.DfsFileFactory.DfsGenericOpen(AbsoluteFileName);
    //For some reason the next line gives an error on one of GEUS' XP-machine. The NumberOfTimeSteps == 0
    TimeSteps = df.FileInfo.TimeAxis.GetDateTimes();
    // Rebuild each step as first date + that step's offset (read via item 1).
    for (int i = 0; i < df.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
    {
        TimeSteps[i] = TimeSteps[0].AddSeconds(df.ReadItemTimeStep(1, i).TimeInSeconds(df.FileInfo.TimeAxis));
    }
    // NOTE(review): the static-data offsets (4, +1, +2, +3, +4, +13, and the
    // 23 + 4*ncross stride) are assumed from the RES11 static-data layout —
    // verify against the format description.
    int offset = 4;
    // Items come in two halves of equal size; index j and j + nitems are
    // paired below for the two point types.
    int nitems=df.ItemInfo.Count()/2;
    Points = new List<Res11Point>();
    for (int j = 0; j < nitems; j++)
    {
        // Branch name and topo id are stored as NUL-padded ASCII blobs.
        string name = System.Text.Encoding.ASCII.GetString((byte[])StaticData[offset].Data).Replace("\0","");
        string topo = System.Text.Encoding.ASCII.GetString((byte[])StaticData[offset + 1].Data).Replace("\0", "");
        PointType pt = PointType.Discharge;
        int waterlevelcounter = 0;
        int dischargecounter = 0;
        int itemcounter;
        IDfsDynamicItemInfo CurrentItem;
        // Points alternate: pt toggles on each iteration, so the added point
        // carries the toggled type (first point is WaterLevel).
        for (int i = 0; i < StaticData[offset + 2].ElementCount; i++)
        {
            if (pt == PointType.Discharge)
            {
                itemcounter = waterlevelcounter;
                CurrentItem = df.ItemInfo[j];
                waterlevelcounter++;
                pt = PointType.WaterLevel;
            }
            else
            {
                itemcounter = dischargecounter;
                CurrentItem = df.ItemInfo[j + nitems];
                dischargecounter++;
                pt = PointType.Discharge;
            }
            // Chainage and coordinates are stored as floats; widen to double.
            double chain = (double)(float)StaticData[offset + 2].Data.GetValue(i);
            double x = (double)(float)StaticData[offset + 3].Data.GetValue(i);
            double y = (double)(float)StaticData[offset + 4].Data.GetValue(i);
            Points.Add(new Res11Point(this, CurrentItem, itemcounter, chain, name, topo, x, y, pt));
        }
        // Number of non-zero cross-section entries drives the stride to the
        // next branch's static data.
        int ncross = ((int[])StaticData[offset + 13].Data).Count(var => var != 0);
        offset = offset + 23 + 4 * ncross;
    }
    StaticData.Clear();
}
/// <summary>
/// Opens a RES11 result file, derives the date of every time step from the
/// file's time axis, and builds the Points list from the file's static data,
/// pairing each item j with item j + nitems as water level / discharge.
/// </summary>
/// <param name="Res11FileName">Path to the .res11 file to open.</param>
public Res11(string Res11FileName)
{
    AbsoluteFileName = System.IO.Path.GetFullPath(Res11FileName);
    df = DHI.Generic.MikeZero.DFS.DfsFileFactory.DfsGenericOpen(AbsoluteFileName);
    //For some reason the next line gives an error on one of GEUS' XP-machine. The NumberOfTimeSteps == 0
    TimeSteps = df.FileInfo.TimeAxis.GetDateTimes();
    // Each step date = first date + the step's offset in seconds (item 1).
    for (int i = 0; i < df.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
    {
        TimeSteps[i] = TimeSteps[0].AddSeconds(df.ReadItemTimeStep(1, i).TimeInSeconds(df.FileInfo.TimeAxis));
    }
    // NOTE(review): offsets 4, +1..+4, +13 and the 23 + 4*ncross stride are
    // assumed from the RES11 static-data layout — confirm against the spec.
    int offset = 4;
    // The dynamic items split into two equal halves, indexed j and j+nitems.
    int nitems = df.ItemInfo.Count() / 2;
    Points = new List<Res11Point>();
    for (int j = 0; j < nitems; j++)
    {
        // NUL-padded ASCII strings: branch name and topo id.
        string name = System.Text.Encoding.ASCII.GetString((byte[])StaticData[offset].Data).Replace("\0", "");
        string topo = System.Text.Encoding.ASCII.GetString((byte[])StaticData[offset + 1].Data).Replace("\0", "");
        PointType pt = PointType.Discharge;
        int waterlevelcounter = 0;
        int dischargecounter = 0;
        int itemcounter;
        IDfsDynamicItemInfo CurrentItem;
        // pt toggles every iteration; each added point gets the toggled type.
        for (int i = 0; i < StaticData[offset + 2].ElementCount; i++)
        {
            if (pt == PointType.Discharge)
            {
                itemcounter = waterlevelcounter;
                CurrentItem = df.ItemInfo[j];
                waterlevelcounter++;
                pt = PointType.WaterLevel;
            }
            else
            {
                itemcounter = dischargecounter;
                CurrentItem = df.ItemInfo[j + nitems];
                dischargecounter++;
                pt = PointType.Discharge;
            }
            // Values stored as floats; widen to double for the point.
            double chain = (double)(float)StaticData[offset + 2].Data.GetValue(i);
            double x = (double)(float)StaticData[offset + 3].Data.GetValue(i);
            double y = (double)(float)StaticData[offset + 4].Data.GetValue(i);
            Points.Add(new Res11Point(this, CurrentItem, itemcounter, chain, name, topo, x, y, pt));
        }
        // Non-zero cross-section count determines the next branch's offset.
        int ncross = ((int[])StaticData[offset + 13].Data).Count(var => var != 0);
        offset = offset + 23 + 4 * ncross;
    }
    StaticData.Clear();
}
/// <summary>
/// Introductory example of how to load a dfs0 file.
/// <para>
/// The method assumes that the Rain_stepaccumulated.dfs0 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of Rain_stepaccumulated.dfs0 test file</param>
public static double ReadDfs0File(string filename)
{
    // Open the file as a generic dfs file
    IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

    // Header information lives in the IDfsFileInfo
    IDfsFileInfo fileInfo = dfs0File.FileInfo;
    int steps = fileInfo.TimeAxis.NumberOfTimeSteps; // 19

    // Information on each of the dynamic items, here the first one
    IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs0File.ItemInfo[0];
    string nameOfFirstDynamicItem = dynamicItemInfo.Name;            // "Rain"
    DfsSimpleType typeOfFirstDynamicItem = dynamicItemInfo.DataType; // Double
    ValueType valueType = dynamicItemInfo.ValueType;                 // StepAccumulated

    // Read data of first item, third time step (items start by 1, timesteps by 0)
    IDfsItemData datag = dfs0File.ReadItemTimeStep(1, 2);
    double value1 = System.Convert.ToDouble(datag.Data.GetValue(0)); // 0.36

    // Assuming this is a double item, the generic item data can be cast
    IDfsItemData<double> data = (IDfsItemData<double>)datag;
    double value2 = data.Data[0]; // 0.36

    // Sum every value in the file. For performance reasons it is important
    // to iterate over time steps first and items second.
    double sum = 0;
    for (int step = 0; step < steps; step++)
    {
        for (int item = 1; item <= dfs0File.ItemInfo.Count; item++)
        {
            data = (IDfsItemData<double>)dfs0File.ReadItemTimeStep(item, step);
            sum += data.Data[0];
        }
    }
    dfs0File.Close();
    return sum;
}
/// <summary>
/// Copies every stride'th entry of the given time-step list from the input
/// file to the output file, for all items.
/// </summary>
private void _ProcessAllTimeSteps(List<int> timesteps, int stride)
{
    var numItems = _inputDfs.ItemInfo.Count;
    int position = -1;
    foreach (var timestep in timesteps)
    {
        position++;
        // Skip all but every stride'th position in the list.
        if (position % stride != 0)
        {
            continue;
        }
        for (int item = 1; item <= numItems; ++item)
        {
            var itemdata = _inputDfs.ReadItemTimeStep(item, timestep);
            _outputDfs.WriteItemTimeStepNext(itemdata.Time, (float[])itemdata.Data);
        }
    }
}
/// <summary>
/// Iterates over every time step and item of the input file. The actual
/// transformation and write are commented out, so this currently only reads.
/// </summary>
private void ProcessAllTimeSteps(IDfsFile outputDfs)
{
    var stepCount = _dfsInput.FileInfo.TimeAxis.NumberOfTimeSteps;
    var itemCount = _dfsInput.ItemInfo.Count;
    for (int step = 0; step < stepCount; step++)
    {
        for (int item = 1; item <= itemCount; ++item)
        {
            var stepData = _dfsInput.ReadItemTimeStep(item, step);
            var values = (float[])stepData.Data;
            var time = stepData.Time;
            // Transformation left disabled by the original author:
            //var outdata = values.Select(r => r * fac + constant).ToArray();
            //outputDfs.WriteItemTimeStepNext(time, outdata);
        }
    }
}
/// <summary>
/// Bulk read the times and data for a dfs0 file, putting it all in
/// a matrix structure.
/// <para>
/// First column in the result are the times, then a column for each
/// item in the file. There are as many rows as there are timesteps.
/// All item data are converted to doubles.
/// </para>
/// </summary>
public static double[,] ReadDfs0DataDouble(IDfsFile dfs0File)
{
    int nItems = dfs0File.ItemInfo.Count;
    int nSteps = dfs0File.FileInfo.TimeAxis.NumberOfTimeSteps;
    var result = new double[nSteps, nItems + 1];

    // One reusable item-data buffer per item (item numbers are 1-based).
    var buffers = new IDfsItemData[nItems];
    for (int item = 0; item < nItems; item++)
    {
        buffers[item] = dfs0File.CreateEmptyItemData(item + 1);
    }
    dfs0File.Reset();

    // The "time" axis may carry a non-time unit; only convert to seconds
    // when the axis unit is equivalent to seconds.
    eumUnit axisUnit = dfs0File.FileInfo.TimeAxis.TimeUnit;
    bool axisIsTime = EUMWrapper.eumUnitsEqv((int)eumUnit.eumUsec, (int)axisUnit);

    for (int step = 0; step < nSteps; step++)
    {
        for (int item = 0; item < nItems; item++)
        {
            IDfsItemData buffer = buffers[item];
            dfs0File.ReadItemTimeStep(buffer, step);
            if (item == 0)
            {
                result[step, 0] = axisIsTime
                    ? buffer.TimeInSeconds(dfs0File.FileInfo.TimeAxis)
                    : buffer.Time; // not a time-unit, just return the value
            }
            result[step, item + 1] = Convert.ToDouble(buffer.Data.GetValue(0));
        }
    }
    return result;
}
/// <summary>
/// Writes the element-wise weighted sum fac1*input1 + fac2*input2 of the two
/// input files to the output file, for every item and time step.
/// </summary>
private void ProcessAllTimeSteps(IDfsFile outputDfs, float fac1, float fac2)
{
    //int numTimes = _dfsInput1.FileInfo.TimeAxis.NumberOfTimeSteps;
    var numItems = _dfsInput1.ItemInfo.Count;
    for (int timestep = 0; timestep < _nTimes; timestep++)
    {
        for (int item = 1; item <= numItems; ++item)
        {
            // Read the same item/step from both inputs; the time of the
            // first input is used for the output step.
            var first = _dfsInput1.ReadItemTimeStep(item, timestep);
            var firstValues = (float[])first.Data;
            var time = first.Time;
            var secondValues = (float[])_dfsInput2.ReadItemTimeStep(item, timestep).Data;

            // Element-wise: fac1 * a + fac2 * b
            var combined = firstValues.Select(v => v * fac1)
                .Zip(secondValues.Select(v => v * fac2), (a, b) => a + b)
                .ToArray();
            outputDfs.WriteItemTimeStepNext(time, combined);
        }
    }
}
/// <summary>
/// Find maximum value and time of maximum for a specified item in dfs0 file
/// </summary>
/// <param name="filename">Path and name of file, e.g. data_ndr_roese.dfs0 test file</param>
/// <param name="itemNumber">Item number to find maximum for</param>
/// <returns>The largest value found for the item</returns>
public static double FindMaxValue(string filename, int itemNumber)
{
    // Open file, using stream class.
    // BUG FIX: the stream and the dfs file were never closed/disposed in the
    // original version; the using-statement and try/finally fix the leak.
    using (Stream stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    {
        IDfsFile file = DfsFileFactory.DfsGenericOpen(stream);
        //IDfsFile file = DfsFileFactory.DfsGenericOpen(filename);
        try
        {
            // Extract Start date-time of file - assuming file is equidistant-calendar axis
            IDfsEqCalendarAxis timeAxis = (IDfsEqCalendarAxis)file.FileInfo.TimeAxis;
            DateTime startDateTime = timeAxis.StartDateTime;

            // Empty item data, reused when calling ReadItemTimeStep
            IDfsItemData<float> itemData = (IDfsItemData<float>)file.CreateEmptyItemData(itemNumber);

            // max value and time variables
            double maxValue = double.MinValue;
            double maxTimeSeconds = -1;
            DateTime maxDateTime = DateTime.MinValue;

            // Loop over all times in file
            for (int i = 0; i < file.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
            {
                // Read time step for item, and extract value
                file.ReadItemTimeStep(itemData, i);
                double value = itemData.Data[0];

                // Check if value is larger than maxValue
                if (value > maxValue)
                {
                    maxValue = value;
                    maxTimeSeconds = itemData.TimeInSeconds(timeAxis);
                    maxDateTime = itemData.TimeAsDateTime(timeAxis);
                }
            }

            // Report results (before the file is closed, so ItemInfo is valid)
            Console.Out.WriteLine("Max Value : {0} {1}", maxValue, file.ItemInfo[itemNumber - 1].Quantity.UnitAbbreviation);
            Console.Out.WriteLine("Max Value time : {0}", maxDateTime.ToString("yyyy-MM-dd HH:mm:ss"));
            return maxValue;
        }
        finally
        {
            file.Close();
        }
    }
}
/// <summary>
/// Introductory example of how to load a dfs0 file with a non-time axis
/// as the primary axis. The important part here is to NOT call
/// the <code>data.TimeInSeconds()</code>, because that will fail.
/// </summary>
/// <param name="filename">path and name of Added_Mass.dfs0 test file</param>
public static double ReadNonTimeAxisDfs0(string filename)
{
    // Open the file as a generic dfs file
    IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

    // Header information is contained in the IDfsFileInfo
    IDfsFileInfo fileInfo = dfs0File.FileInfo;
    // The TimeAxis is not a time axis, but a regular axis
    int steps = fileInfo.TimeAxis.NumberOfTimeSteps;            // 256
    TimeAxisType timeAxisType = fileInfo.TimeAxis.TimeAxisType; // TimeNonEquidistant
    eumUnit timeUnit = fileInfo.TimeAxis.TimeUnit;              // radian-per-second

    // Information on each of the dynamic items, here the first one
    IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs0File.ItemInfo[0];
    string nameOfFirstDynamicItem = dynamicItemInfo.Name;            // "DOF_1-1"
    DfsSimpleType typeOfFirstDynamicItem = dynamicItemInfo.DataType; // Float
    ValueType valueType = dynamicItemInfo.ValueType;                 // Instantaneous

    // Sum every value in the file. For performance reasons iterate over
    // time steps first and items second.
    double sum = 0;
    for (int step = 0; step < steps; step++)
    {
        for (int item = 1; item <= dfs0File.ItemInfo.Count; item++)
        {
            var data = (IDfsItemData<float>)dfs0File.ReadItemTimeStep(item, step);
            // The axis value here is not a time but in radian-per-second.
            double axisValue = data.Time;
            sum += data.Data[0];
        }
    }
    dfs0File.Close();
    return sum;
}
/// <summary>
/// Loads a MIKE 11 HD result file (.RES11): runs the external res11read.exe
/// to dump point coordinates, fills comboBox1 with water level ("WL") and
/// discharge ("Q") labels, and caches all result values in dfsData.
/// </summary>
private void btnLoadHD_Click(object sender, EventArgs e)
{
    try
    {
        label2.Visible = true;
        OpenFileDialog dialog = new OpenFileDialog();
        dialog.Filter = "Mike HD Result Files|*.RES11";
        if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
        {
            fileName = dialog.FileName;
        }
        // NOTE(review): if the dialog is cancelled, fileName keeps any
        // previous value and processing continues — confirm this is intended.
        // Dump x/y coordinates of result points into <file>_xy.txt.
        ProcessStartInfo start = new ProcessStartInfo();
        Process exeProcess = new Process();
        start.FileName = @"C:\Program Files\DHI\2014\bin\res11read.exe";
        start.Arguments = "-xy " + fileName + " " + fileName.Substring(0, fileName.Length - 6) + "_xy.txt";
        exeProcess = Process.Start(start);
        // NOTE(review): set after Start(), so CreateNoWindow has no effect here.
        start.CreateNoWindow = true;
        exeProcess.WaitForExit();

        // Parse the dump: third column "2" marks discharge points,
        // "0"/"1" mark water level points.
        string[] riverChainageFile = File.ReadAllLines(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
        char[] charSeparators = new char[] { ' ' };
        StringBuilder sb = new StringBuilder();
        for (int i = 19; i < riverChainageFile.Length - 3; i++)
        {
            var texts = riverChainageFile[i].Substring(24, 140).Split(charSeparators, StringSplitOptions.RemoveEmptyEntries);
            if (texts[2] == "2")
            {
                QItems.Add("Q," + texts[0] + "," + texts[1]);
            }
            else if (texts[2] == "0" || texts[2] == "1")
            {
                WLItems.Add("WL," + texts[0] + "," + texts[1]);
            }
        }
        // Combo box and rewritten _xy.txt: water level entries first, then discharge.
        for (int i = 0; i < WLItems.Count; i++)
        {
            sb.AppendLine(WLItems[i]);
            comboBox1.Items.Add(WLItems[i]);
        }
        for (int i = 0; i < QItems.Count; i++)
        {
            sb.AppendLine(QItems[i]);
            comboBox1.Items.Add(QItems[i]);
        }
        File.Delete(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
        File.WriteAllText(fileName.Substring(0, fileName.Length - 6) + "_xy.txt", sb.ToString());

        // Open the result file and sum the element counts of all items (cx).
        IDfsFile resFile = DfsFileFactory.DfsGenericOpen(fileName);
        DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
        DateTime startDate = date[0];
        IDfsFileInfo resfileInfo = resFile.FileInfo;
        IDfsItemData<float> data;
        noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
        int cx = 0;
        for (int j = 0; j < resFile.ItemInfo.Count; j++)
        {
            IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
            data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, 0);
            cx = cx + dynamicItemInfo.ElementCount;
        }
        MessageBox.Show(cx.ToString());

        // Dates of all steps: start date + step time passed to AddHours
        // (step times are assumed to be in hours — TODO confirm).
        for (int i = 0; i < noTimeSteps; i++)
        {
            dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
        }
        // Cache every element of every item per step, one dfsData column per
        // combo-box entry; extra elements beyond the entry count are dropped.
        for (int i = 0; i < noTimeSteps; i++)
        {
            int counter = 0;
            int totalNode = 0;
            for (int j = 0; j < resFile.ItemInfo.Count; j++)
            {
                IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                counter = dynamicItemInfo.ElementCount;
                for (int z = 0; z < counter; z++)
                {
                    if (totalNode < comboBox1.Items.Count)
                    {
                        dfsData[i, totalNode] = (Convert.ToSingle(data.Data[z]));
                        totalNode = totalNode + 1;
                    }
                    else
                    {
                        break;
                    }
                }
            }
        }

        // Remember the directory of the result file for later exports.
        var filepath = fileName.Split('\\');
        dfs0Path = filepath[0];
        for (int i = 1; i < filepath.Length - 1; i++)
        {
            dfs0Path = dfs0Path + @"\" + filepath[i];
        }
        label2.Text = "Loaded successfully.";
    }
    catch (Exception error)
    {
        MessageBox.Show("File have not loaded. Error: " + error.Message);
    }
}
/// <summary>
/// Loads a MIKE NAM result file (.RES11) and exports every item whose name
/// starts with "RunOff" to its own single-item dfs0 file in the result
/// file's directory.
/// </summary>
private void btnLoadNAM_Click(object sender, EventArgs e)
{
    OpenFileDialog dialog = new OpenFileDialog();
    dialog.Filter = "Mike NAM Result Files|*.RES11";
    if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
    {
        fileName = dialog.FileName;
    }
    // Directory of the selected file, used as the output folder.
    var filepath = fileName.Split('\\');
    dfs0Path = filepath[0];
    for (int i = 1; i < filepath.Length - 1; i++)
    {
        dfs0Path = dfs0Path + @"\" + filepath[i];
    }

    IDfsFile resFile = DfsFileFactory.DfsGenericOpenEdit(fileName);
    IDfsFileInfo resfileInfo = resFile.FileInfo;
    DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
    DateTime startDate = date[0];
    IDfsItemData<float> data;
    int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
    float[] values = new float[noTimeSteps];
    // Absolute date of each step: start date + the step time passed to
    // AddHours (step times are assumed to be in hours — TODO confirm).
    for (int i = 0; i < noTimeSteps; i++)
    {
        dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
    }
    // One output dfs0 per item whose name starts with "RunOff".
    for (int j = 0; j < resFile.ItemInfo.Count; j++)
    {
        IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
        string nameOftDynamicItem = dynamicItemInfo.Name;
        // NOTE(review): Substring(0, 6) throws for item names shorter than
        // six characters — verify item naming in the result files.
        string checkname = nameOftDynamicItem.Substring(0, 6);
        if (checkname == "RunOff")
        {
            string filename = dfs0Path + @"\" + nameOftDynamicItem + ".dfs0";
            // Build a single-item, non-equidistant calendar-axis dfs0 file
            // starting at the first step's date.
            DfsFactory factory = new DfsFactory();
            DfsBuilder filecreator = DfsBuilder.Create(nameOftDynamicItem, nameOftDynamicItem, 2014);
            filecreator.SetDataType(1);
            filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
            //filecreator.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, new DateTime(2010, 01, 01, 06, 00, 00), 0, 10800));
            filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec, new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day, dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
            filecreator.SetItemStatisticsType(StatType.RegularStat);
            DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
            item.Set(nameOftDynamicItem, eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float);
            item.SetValueType(DataValueType.Instantaneous);
            item.SetAxis(factory.CreateAxisEqD0());
            item.SetReferenceCoordinates(1f, 2f, 3f);
            filecreator.AddDynamicItem(item.GetDynamicItemInfo());
            filecreator.CreateFile(filename);
            IDfsFile file = filecreator.GetFile();
            // Copy the item's series; times are written as seconds elapsed
            // since the first step's date.
            for (int i = 0; i < noTimeSteps; i++)
            {
                data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                values[i] = Convert.ToSingle(data.Data[0]);
                file.WriteItemTimeStepNext((dfsDate[i] - dfsDate[0]).TotalSeconds, new float[] { values[i] });
            }
            file.Close();
        }
    }
}
/// <summary>
/// Console tool: reads a user-selected MIKE HD result file (.RES11) and
/// dumps all "Water Level" and "Discharge" item values to two CSV files
/// (&lt;file&gt;_WL.csv and &lt;file&gt;_Q.csv) with date, node index and value.
/// </summary>
static void Main(string[] args)
{
    try
    {
        StringBuilder sb = new StringBuilder();
        OpenFileDialog dialog = new OpenFileDialog();
        dialog.Filter = "Mike HD Result Files|*.RES11";
        if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
        {
            IDfsFile resFile = DfsFileFactory.DfsGenericOpen(dialog.FileName);
            DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
            DateTime startDate = date[0];
            IDfsFileInfo resfileInfo = resFile.FileInfo;
            IDfsItemData<float> data;
            int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
            DateTime[] dfsDate = new DateTime[noTimeSteps];
            List<float> dfsWLData = new List<float>();
            List<float> dfsQData = new List<float>();
            // Absolute date of each step: start date + the step time passed
            // to AddHours (assumed to be in hours — TODO confirm).
            for (int i = 0; i < noTimeSteps; i++)
            {
                dfsDate[i] = startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time);
            }
            int totalWNode = 0;
            int totalQNode = 0;
            // Collect all water level and discharge values step by step into
            // two flat lists, in item order within each step.
            for (int i = 0; i < noTimeSteps; i++)
            {
                int Wcounter = 0;
                int nodeWCount = 0;
                int Qcounter = 0;
                int nodeQCount = 0;
                for (int j = 0; j < resFile.ItemInfo.Count; j++)
                {
                    IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                    string nameOftDynamicItem = dynamicItemInfo.Name;
                    // NOTE(review): Substring(0, 11)/(0, 9) throws when an
                    // item name is shorter than that — verify item naming.
                    string WLname = nameOftDynamicItem.Substring(0, 11);
                    string Qname = nameOftDynamicItem.Substring(0, 9);
                    if (WLname == "Water Level")
                    {
                        Wcounter = dynamicItemInfo.ElementCount;
                        data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                        for (int z = 0; z < Wcounter; z++)
                        {
                            dfsWLData.Add(Convert.ToSingle(data.Data[z]));
                            nodeWCount = nodeWCount + 1;
                        }
                    }
                    else if (Qname == "Discharge")
                    {
                        Qcounter = dynamicItemInfo.ElementCount;
                        data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                        for (int z = 0; z < Qcounter; z++)
                        {
                            dfsQData.Add(Convert.ToSingle(data.Data[z]));
                            nodeQCount = nodeQCount + 1;
                        }
                    }
                }
                Console.WriteLine(i);
                // Overwritten each step; the values of the last step are used
                // as the per-step node counts for indexing below.
                totalWNode = nodeWCount;
                totalQNode = nodeQCount;
            }
            // Water level CSV: one line per (time step, node), indexed into
            // the flat list as i * totalWNode + j.
            for (int i = 0; i < noTimeSteps; i++)
            {
                for (int j = 0; j < totalWNode; j++)
                {
                    sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsWLData[i * totalWNode + j]);
                }
                File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_WL.csv", sb.ToString());
                sb.Clear();
            }
            // Discharge CSV, written the same way.
            for (int i = 0; i < noTimeSteps; i++)
            {
                for (int j = 0; j < totalQNode; j++)
                {
                    sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsQData[i * totalQNode + j]);
                }
                File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_Q.csv", sb.ToString());
                sb.Clear();
            }
            Console.WriteLine("Result file processed suceesssfully.");
            Console.WriteLine("Press any key to exit...");
            Console.ReadKey();
        }
    }
    catch (Exception error)
    {
        Console.WriteLine("HD Model Result files cannot be processed due to an error. Error: " + error.Message);
        Console.WriteLine("Press any key to exit...");
        Console.ReadKey();
    }
}
//private readonly List<IExchangeItem> _exchangeItems;
//private readonly List<TimeSerie> _timeSeries;

/// <summary>
/// DFS0 reader. Gets information from the dfs file, and reads data.
/// </summary>
/// <param name="dfsfile">full path string to dfs0 file.</param>
public Dfs0Reader(string dfsfile) : base(dfsfile)
{
    // Set ObservationFile
    if (!File.Exists(dfsfile))
    {
        throw new FileNotFoundException("\n ERROR: DFS File Not Found! \n Could not find: {0} \n", dfsfile);
    }

    // Determine Type: only .dfs0 files are accepted
    string fileExtension = Path.GetExtension(dfsfile);
    if (System.String.Compare(fileExtension, ".dfs0", System.StringComparison.OrdinalIgnoreCase) == 0)
    {
        fileExtension = Path.GetExtension(dfsfile);
    }
    else
    {
        throw new Exception("\n ERROR: Observation File Type Incorrect! Expecting dfs0. \n \n");
    }

    // Open the file as a generic dfs file
    _dfs0File = DfsFileFactory.DfsGenericOpen(dfsfile);

    // Header information is contained in the IDfsFileInfo
    IDfsFileInfo fileInfo = _dfs0File.FileInfo;

    // Check for dfs compliance
    CheckDFSCompliance();

    // Number of time steps (same for all items)
    _numTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;

    // Starting from...
    int _firstTimeStepIndex = fileInfo.TimeAxis.FirstTimeStepIndex;

    // Number of variable items in dfs0
    _numItems = _dfs0File.ItemInfo.Count;

    // Item names are parsed as "x,y,layer" coordinate strings; collect the
    // IDs, quantities (file title) and layer points in parallel lists.
    _itemIDs = new List<string>();
    _quantities = new List<string>();
    _xyLayerPoints = new List<IXYLayerPoint>();
    foreach (var itemInfo in _dfs0File.ItemInfo)
    {
        String name = itemInfo.Name;
        var coords = name.Split(',');
        double x = Convert.ToDouble(coords[0]);
        double y = Convert.ToDouble(coords[1]);
        int zLayer = Convert.ToInt32(coords[2]);
        _quantities.Add(_dfs0File.FileInfo.FileTitle);
        _itemIDs.Add(name);
        _xyLayerPoints.Add(new XYLayerPoint(x, y, zLayer));
    }

    //Gather all times
    _times = _dfs0File.FileInfo.TimeAxis.GetDateTimes().ToList();
    // NOTE(review): the next line immediately overwrites the list read above
    // with the _timesteps field — confirm this is intentional.
    _times = _timesteps;
    DateTime firstTime = _times[0]; // NOTE(review): unused local
    if (_dfs0File.FileInfo.TimeAxis.TimeAxisType != TimeAxisType.CalendarEquidistant)
    {
        //Handle pseudo irreggular files
        double[] dates = new double[_numTimeSteps]; //just make 1 bigger for easy indexing
        for (int iTimeStep = _firstTimeStepIndex; iTimeStep < _numTimeSteps; iTimeStep++)
        {
            for (int iItem = 1; iItem < _numItems + 1; iItem++)
            {
                IDfsItemData data1 = _dfs0File.ReadItemTimeStep(iItem, iTimeStep);
                double offsetTime = data1.Time;
                if (iItem == 1)
                {
                    dates[iTimeStep] = offsetTime;
                }
                else
                {
                    // All items must share (within 1 unit) the same step time.
                    if (Math.Abs(offsetTime - dates[iTimeStep]) > 1.0)
                    {
                        throw new Exception("Non Equidistant Calander is not regular");
                    }
                }
            }
            if (iTimeStep > 0)
            {
                _times[iTimeStep] = _times[0].AddSeconds(dates[iTimeStep]);
            }
        }
    }
    IList<IDfsDynamicItemInfo> infoAllTimes = _dfs0File.ItemInfo;
    String TimeSeriesName = infoAllTimes[0].Name; // NOTE(review): unused local

    // Delelte Values
    _deleteValueDouble = _dfs0File.FileInfo.DeleteValueDouble;
    _deleteValueFloat = _dfs0File.FileInfo.DeleteValueFloat;
}