/// <summary>
/// Appends the dynamic data of one DFS file to another. It is assumed that:
/// <list type="bullet">
/// <item>The files have identical dynamic and static items</item>
/// <item>The last time step of the target file equals the first time step
/// of the source file, so the source's first time step is skipped</item>
/// </list>
/// <para>
/// This uses generic DFS functionality and therefore works for any type
/// of DFS file.
/// </para>
/// </summary>
public static void AppendToFile(string targetFile, string sourceFile)
{
    // Open target for appending, source read-only.
    IDfsFile dest = DfsFileFactory.DfsGenericOpenAppend(targetFile);
    IDfsFile src = DfsFileFactory.DfsGenericOpen(sourceFile);

    // End time of the target file, in the unit of its time axis. Sufficient
    // as long as TimeAxis.StartTimeOffset matches in both files (it is zero
    // for most files).
    double destEndTime = dest.FileInfo.TimeAxis.TimeSpan();

    // Skip the initial time step 0 of the source; position reader at step 1.
    src.FindTimeStep(1);

    // Stream every remaining item/time-step pair into the target, shifting
    // times so they continue after the target's last step.
    for (IDfsItemData item = src.ReadItemTimeStepNext();
         item != null;
         item = src.ReadItemTimeStepNext())
    {
        dest.WriteItemTimeStepNext(destEndTime + item.Time, item.Data);
    }

    dest.Close();
    src.Close();
}
/// <summary>
/// Extracts the given dynamic items, over all time steps, from the input
/// file into <c>OutputFile</c>.
/// </summary>
/// <param name="items">Item numbers to extract</param>
public void ExtractItems(List<int> items)
{
    try
    {
        _OpenFiles();
        var starttimestep = 0;
        var endtimestep = _inputDfs.FileInfo.TimeAxis.NumberOfTimeSteps - 1;
        var stride = 1;
        var timesteps = _GetTimeSteps(starttimestep, endtimestep);
        _outputDfs = _CreateFromTemplate(_inputDfs, OutputFile, timesteps, stride, items);
        _ProcessAllTimeSteps(timesteps, stride, items);
    }
    finally
    {
        // Null-conditional: _OpenFiles or _CreateFromTemplate may have thrown
        // before one of the file references was assigned; a plain Close()
        // would then raise a NullReferenceException and mask the real error.
        _inputDfs?.Close();
        _outputDfs?.Close();
    }
}
/// <summary>
/// Example on how to modify a custom block.
/// <para>
/// The method assumes the input is a dfs2 file carrying the "M21_Misc"
/// custom block, like the OresundHD.dfs2 test file.
/// </para>
/// </summary>
/// <param name="filename">path and name of dfs2 test file</param>
public static void CustomBlockModify(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);

    // Grab the first custom block of the header and overwrite its
    // fourth value in place.
    IDfsCustomBlock block = file.FileInfo.CustomBlocks[0];
    block[3] = 25;

    file.Close();
}
/// <summary>
/// Exports simulated discharge series for selected MIKE HYDRO nodes from a
/// result dfs0 file into one dfs0 boundary file per node.
/// </summary>
static void Main(string[] args)
{
    // Node numbers whose discharge items are exported (also used as the
    // 1-based dynamic item number in the result file).
    int[] nodeNumbers = { 899, 2686, 2856, 2866, 2331, 3806, 2231, 3831 };

    // Open read-only: the result file is only read, never modified.
    IDfsFile resFile = DfsFileFactory.DfsGenericOpen(@"E:\FFWS\Model\MIKEHYDRO\GBM_MIKEHYDRO.mhydro - Result Files\RiverBasin_GBM.dfs0");
    try
    {
        IDfsFileInfo resfileInfo = resFile.FileInfo;
        int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
        DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
        DateTime startDate = date[0];

        // Relative times of every time step, read once from item 899.
        double[] timeSpan = new double[noTimeSteps];
        for (int j = 0; j < noTimeSteps; j++)
        {
            timeSpan[j] = resFile.ReadItemTimeStep(899, j).Time;
        }

        // One factory is enough for all output files (was recreated per node).
        DfsFactory factory = new DfsFactory();

        foreach (int element in nodeNumbers)
        {
            // Read the full series for this node.
            float[] QSimvalues = new float[noTimeSteps];
            for (int j = 0; j < noTimeSteps; j++)
            {
                var data = (IDfsItemData<float>)resFile.ReadItemTimeStep(element, j);
                QSimvalues[j] = Convert.ToSingle(data.Data[0]);
            }

            // Build a single-item dfs0 file named after the node.
            string filename = @"E:\FFWS\Model\BrahmaputraHD\Boundary\" + element + ".dfs0";
            DfsBuilder filecreator = DfsBuilder.Create(element.ToString(), element.ToString(), 2014);
            filecreator.SetDataType(1);
            filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
            filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec,
                new DateTime(startDate.Year, startDate.Month, startDate.Day,
                             startDate.Hour, startDate.Minute, startDate.Second)));
            filecreator.SetItemStatisticsType(StatType.RegularStat);

            DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
            item.Set(element.ToString(), eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float);
            item.SetValueType(DataValueType.Instantaneous);
            item.SetAxis(factory.CreateAxisEqD0());
            item.SetReferenceCoordinates(1f, 2f, 3f);
            filecreator.AddDynamicItem(item.GetDynamicItemInfo());
            filecreator.CreateFile(filename);

            IDfsFile file = filecreator.GetFile();
            for (int j = 0; j < noTimeSteps; j++)
            {
                file.WriteItemTimeStepNext(timeSpan[j], new float[] { QSimvalues[j] });
            }
            file.Close();
        }
    }
    finally
    {
        // Was missing in the original: always release the result file handle,
        // also when an export fails part-way.
        resFile.Close();
    }
}
/// <summary>
/// Updates the temporal axis of a file having an <see cref="IDfsEqCalendarAxis"/>
/// type time axis.
/// <para>
/// Works on a file like the OresundHD.dfs2 test file, which has an
/// <see cref="IDfsEqCalendarAxis"/> type time axis.
/// </para>
/// </summary>
/// <param name="filename">path and name of test file</param>
public static void TemporalAxisModify(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);

    // The cast is safe only for files with an equidistant calendar axis.
    IDfsEqCalendarAxis axis = (IDfsEqCalendarAxis)file.FileInfo.TimeAxis;

    // Overwrite the axis definition with new values.
    axis.FirstTimeStepIndex = 3;
    axis.StartTimeOffset = 6;
    axis.StartDateTime = new DateTime(2009, 2, 2, 21, 43, 00);
    axis.TimeUnit = eumUnit.eumUminute;
    axis.TimeStep = 1;

    file.Close();
}
/// <summary>
/// Extracts all time steps falling between the two dates from the input
/// file into <c>OutputFile</c>.
/// </summary>
/// <param name="dateTime1">Start of the extraction interval</param>
/// <param name="dateTime2">End of the extraction interval</param>
public void ExtractDateTimes(DateTime dateTime1, DateTime dateTime2)
{
    try
    {
        _OpenFiles();
        var timesteps = _GetTimeSteps(dateTime1, dateTime2);
        _outputDfs = _CreateFromTemplate(_inputDfs, OutputFile, timesteps, 1);
        _ProcessAllTimeSteps(timesteps);
    }
    finally
    {
        // Null-conditional: an exception before assignment (e.g. in
        // _OpenFiles or _CreateFromTemplate) would otherwise cause a
        // NullReferenceException here, masking the original error.
        _inputDfs?.Close();
        _outputDfs?.Close();
    }
}
/// <summary>
/// Extracts the time steps in the range [starttimestep; endtimestep],
/// keeping every <paramref name="stride"/>'th step, into <c>OutputFile</c>.
/// </summary>
/// <param name="starttimestep">First time step index to extract</param>
/// <param name="endtimestep">Last time step index to extract</param>
/// <param name="stride">Keep every stride'th time step</param>
public void ExtractTimeSteps(int starttimestep, int endtimestep, int stride)
{
    try
    {
        _OpenFiles();
        var timesteps = _GetTimeSteps(starttimestep, endtimestep);
        _outputDfs = _CreateFromTemplate(_inputDfs, OutputFile, timesteps, stride);
        _ProcessAllTimeSteps(timesteps, stride);
    }
    finally
    {
        // Null-conditional: a failure before either file reference is
        // assigned must not trigger a NullReferenceException in cleanup.
        _inputDfs?.Close();
        _outputDfs?.Close();
    }
}
/// <summary>
/// Extracts the explicitly listed time steps, keeping every
/// <paramref name="stride"/>'th step, into <c>OutputFile</c>.
/// </summary>
/// <param name="timesteps">Time step indices to extract</param>
/// <param name="stride">Keep every stride'th time step</param>
public void ExtractTimeSteps(List<int> timesteps, int stride)
{
    try
    {
        _OpenFiles();
        // Validate the stride against the last requested time step.
        _VerifyStride(timesteps.Last(), stride);
        _outputDfs = _CreateFromTemplate(_inputDfs, OutputFile, timesteps, stride);
        _ProcessAllTimeSteps(timesteps, stride);
    }
    finally
    {
        // Null-conditional: _OpenFiles/_VerifyStride/_CreateFromTemplate may
        // throw before the file references are assigned; plain Close() would
        // then NRE and hide the original exception.
        _inputDfs?.Close();
        _outputDfs?.Close();
    }
}
/// <summary>
/// Updates the item info of the third dynamic item.
/// <para>
/// The method assumes that the OresundHD.dfs2 test file
/// (or preferably a copy of it) is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of OresundHD.dfs2 test file</param>
public static void DynamicItemInfoModify(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);
    IDfsDynamicItemInfo item = file.ItemInfo[2];

    // Previous values in OresundHD.dfs2:
    //   name:      "Q Flux m^3/s/m"  (new name is stored padded with spaces)
    //   quantity:  (eumItem.eumIFlowFlux, eumUnit.eumUm3PerSecPerM)
    //   ValueType: Instantaneous
    item.Name = "ShortD";
    item.Quantity = eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec);
    item.ValueType = DataValueType.MeanStepBackward;

    // Previous reference coordinates and orientation were -1.00000002e-35f.
    item.SetReferenceCoordinates(1, 2, 3);
    item.SetOrientation(4, 5, 6);

    file.Close();
}
/// <summary>
/// Updates header information - <see cref="IDfsFileInfo"/>.
/// <para>
/// The method assumes that the OresundHD.dfs2 test file
/// (or preferably a copy of it) is the input file.
/// </para>
/// <para>
/// Strings are padded with zeros when too short, and truncated when too long.
/// </para>
/// </summary>
/// <param name="filename">path and name of OresundHD.dfs2 test file</param>
public static void FileInfoModify(string filename)
{
    IDfsFile file = DfsFileFactory.DfsGenericOpenEdit(filename);
    IDfsFileInfo header = file.FileInfo;

    // Titles and type
    header.FileTitle = "ups";
    header.ApplicationTitle = "Short title";
    header.ApplicationVersion = 12;
    header.DataType = 10101;

    // Delete (missing-data) markers, one per storage type
    header.DeleteValueFloat = -5.5e-25f;
    header.DeleteValueByte = 7;
    header.DeleteValueDouble = -7.7e-114;
    header.DeleteValueInt = -123456;
    header.DeleteValueUnsignedInt = 123456;

    file.Close();
}
/// <summary>
/// Processes all time steps of <paramref name="inputfile"/> into a new
/// file created from the input's template.
/// </summary>
/// <param name="inputfile">Path of the existing input dfs file</param>
/// <param name="outputfile">Path of the output dfs file to create</param>
/// <exception cref="Exception">Thrown when the input file does not exist</exception>
public void Run(string inputfile, string outputfile)
{
    if (!File.Exists(inputfile))
    {
        throw new Exception(String.Format("Input file {0} does not exist!", inputfile));
    }
    try
    {
        _dfsInput = DfsFileFactory.DfsGenericOpen(inputfile);
        _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput, outputfile);
        ProcessAllTimeSteps(_dfsOutput);
    }
    finally
    {
        // Null-conditional: if CreateFromTemplate throws, _dfsOutput is still
        // null and a plain Close() would raise a NullReferenceException that
        // masks the original failure.
        _dfsInput?.Close();
        _dfsOutput?.Close();
    }
}
/// <summary>
/// Combines two structurally identical dfs files into an output file by
/// scaling them with <paramref name="fac1"/> and <paramref name="fac2"/>.
/// All three files must share the same extension.
/// </summary>
/// <param name="inputfile1">Path of the first input dfs file</param>
/// <param name="inputfile2">Path of the second input dfs file</param>
/// <param name="fac1">Factor applied to values from the first file</param>
/// <param name="fac2">Factor applied to values from the second file</param>
/// <param name="outputfile">Path of the output dfs file to create</param>
/// <exception cref="Exception">Thrown when a file is missing or extensions differ</exception>
public void Run(string inputfile1, string inputfile2, double fac1, double fac2, string outputfile)
{
    if (!File.Exists(inputfile1))
    {
        throw new Exception(String.Format("First input file {0} does not exist!", inputfile1));
    }
    if (!File.Exists(inputfile2))
    {
        // Bug fix: the message previously reported inputfile1's name here.
        throw new Exception(String.Format("Second input file {0} does not exist!", inputfile2));
    }
    var ext1 = Path.GetExtension(inputfile1).ToLower();
    var ext2 = Path.GetExtension(inputfile2).ToLower();
    if (ext1 != ext2)
    {
        throw new Exception("Input files must have same extension!");
    }
    var ext_out = Path.GetExtension(outputfile).ToLower();
    if (ext1 != ext_out)
    {
        throw new Exception("Input and output files must have same extension!");
    }
    try
    {
        _dfsInput1 = DfsFileFactory.DfsGenericOpen(inputfile1);
        _dfsInput2 = DfsFileFactory.DfsGenericOpen(inputfile2);
        _VerifyInputSimilarity(_dfsInput1, _dfsInput2);
        _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput1, outputfile);
        ProcessAllTimeSteps(_dfsOutput, (float)fac1, (float)fac2);
    }
    finally
    {
        // Null-conditional: any of the opens or CreateFromTemplate may throw
        // before the later references are assigned; plain Close() would then
        // NRE and hide the original exception.
        _dfsInput1?.Close();
        _dfsInput2?.Close();
        _dfsOutput?.Close();
    }
}
/// <summary>
/// Introductory example of how to load a dfs0 file.
/// <para>
/// The method assumes that the Rain_stepaccumulated.dfs0 test file
/// is the input file.
/// </para>
/// </summary>
/// <param name="filename">path and name of Rain_stepaccumulated.dfs0 test file</param>
/// <returns>Sum of all values of all items over all time steps</returns>
public static double ReadDfs0File(string filename)
{
    // Open the file as a generic dfs file
    IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

    // Header information lives in the IDfsFileInfo
    IDfsFileInfo fileInfo = dfs0File.FileInfo;
    int timeStepCount = fileInfo.TimeAxis.NumberOfTimeSteps;                 // 19

    // Metadata of the first dynamic item
    IDfsSimpleDynamicItemInfo firstItem = dfs0File.ItemInfo[0];
    string firstItemName = firstItem.Name;                                   // "Rain"
    DfsSimpleType firstItemDataType = firstItem.DataType;                    // Double
    ValueType firstItemValueType = firstItem.ValueType;                      // StepAccumulated

    // Read first item, third time step (items are 1-based, time steps 0-based)
    IDfsItemData untyped = dfs0File.ReadItemTimeStep(1, 2);
    double value1 = System.Convert.ToDouble(untyped.Data.GetValue(0));       // 0.36

    // Knowing the item holds doubles, the data object can be down-cast
    IDfsItemData<double> data = (IDfsItemData<double>)untyped;
    double value2 = data.Data[0];                                            // 0.36

    // Iterate all time steps and items. For performance, always loop time
    // steps in the outer loop and items in the inner loop.
    double sum = 0;
    int itemCount = dfs0File.ItemInfo.Count;
    for (int timeStep = 0; timeStep < timeStepCount; timeStep++)
    {
        for (int itemNumber = 1; itemNumber <= itemCount; itemNumber++)
        {
            data = (IDfsItemData<double>)dfs0File.ReadItemTimeStep(itemNumber, timeStep);
            sum += data.Data[0];
        }
    }

    dfs0File.Close();
    return (sum);
}
/// <summary>
/// Introductory example of how to load a dfs0 file whose primary axis is
/// NOT a time axis. The important part is to NOT call
/// <code>data.TimeInSeconds()</code>, because that will fail.
/// </summary>
/// <param name="filename">path and name of Added_Mass.dfs0 test file</param>
/// <returns>Sum of all values of all items over all axis steps</returns>
public static double ReadNonTimeAxisDfs0(string filename)
{
    // Open the file as a generic dfs file
    IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

    // Header information lives in the IDfsFileInfo
    IDfsFileInfo fileInfo = dfs0File.FileInfo;

    // Although named TimeAxis, here it is a regular (non-time) axis
    int stepCount = fileInfo.TimeAxis.NumberOfTimeSteps;          // 256
    TimeAxisType axisType = fileInfo.TimeAxis.TimeAxisType;       // TimeNonEquidistant
    eumUnit axisUnit = fileInfo.TimeAxis.TimeUnit;                // radian-per-second

    // Metadata of the first dynamic item
    IDfsSimpleDynamicItemInfo firstItem = dfs0File.ItemInfo[0];
    string firstItemName = firstItem.Name;                        // "DOF_1-1"
    DfsSimpleType firstItemDataType = firstItem.DataType;         // Float
    ValueType firstItemValueType = firstItem.ValueType;           // Instantaneous

    // Iterate all steps and items. For performance, always loop steps in
    // the outer loop and items in the inner loop.
    double sum = 0;
    int itemCount = dfs0File.ItemInfo.Count;
    for (int step = 0; step < stepCount; step++)
    {
        for (int itemNumber = 1; itemNumber <= itemCount; itemNumber++)
        {
            var data = (IDfsItemData<float>)dfs0File.ReadItemTimeStep(itemNumber, step);
            // The axis value is not a time but a radian-per-second value.
            double axisValue = data.Time;
            sum += data.Data[0];
        }
    }

    dfs0File.Close();
    return (sum);
}
/// <summary>
/// For every required station listed in "&lt;base&gt;.txt" that is also present
/// in "&lt;base&gt;_xy.txt", writes a dfs0 boundary file: water level ("WL…")
/// or discharge ("Q,…") depending on the station name prefix.
/// </summary>
private void btnDFS0Fromlist_Click(object sender, EventArgs e)
{
    if (fileName == "")
    {
        MessageBox.Show("No files have been selected for processing...\nPlease Load a file first.");
        return;
    }

    // Station lists derived from the loaded result file's base name.
    string[] requiredDFS0File = File.ReadAllLines(fileName.Substring(0, fileName.Length - 6) + ".txt");
    string[] availableDFS0 = File.ReadAllLines(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");

    foreach (string element in requiredDFS0File)
    {
        for (int i = 0; i < availableDFS0.Length; i++)
        {
            if (element != availableDFS0[i])
            {
                continue;
            }
            // The data column for this station is the index in the
            // availableDFS0 list. Prefix decides item type and quantity.
            string itemType = element.Substring(0, 2);
            if (itemType == "WL")
            {
                _WriteListBoundaryDfs0(element, i, 2012, eumItem.eumIWaterLevel, eumUnit.eumUmeter);
            }
            else if (itemType == "Q,")
            {
                _WriteListBoundaryDfs0(element, i, 2014, eumItem.eumIDischarge, eumUnit.eumUm3PerSec);
            }
        }
    }
    // Typo fix: was "suceesssfully".
    MessageBox.Show("Result file processed successfully.");
}

/// <summary>
/// Writes one non-equidistant calendar-axis dfs0 file for the given station,
/// using column <paramref name="columnIndex"/> of <c>dfsData</c> and the
/// loaded <c>dfsDate</c> time stamps. Extracted to remove the duplicated
/// WL/Q builder code.
/// </summary>
/// <param name="element">Station name; used for file name, titles and item name</param>
/// <param name="columnIndex">Column of dfsData holding this station's values</param>
/// <param name="builderYear">DFS builder version year (2012 for WL, 2014 for Q)</param>
/// <param name="quantityItem">EUM item type of the dynamic item</param>
/// <param name="quantityUnit">EUM unit of the dynamic item</param>
private void _WriteListBoundaryDfs0(string element, int columnIndex, int builderYear, eumItem quantityItem, eumUnit quantityUnit)
{
    DfsFactory factory = new DfsFactory();
    string filename = dfs0Path + @"\" + element + ".dfs0";
    DfsBuilder filecreator = DfsBuilder.Create(element, element, builderYear);
    filecreator.SetDataType(1);
    filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
    filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec,
        new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day,
                     dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
    filecreator.SetItemStatisticsType(StatType.RegularStat);

    DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
    item.Set(element, eumQuantity.Create(quantityItem, quantityUnit), DfsSimpleType.Float);
    item.SetValueType(DataValueType.Instantaneous);
    item.SetAxis(factory.CreateAxisEqD0());
    item.SetReferenceCoordinates(1f, 2f, 3f);
    filecreator.AddDynamicItem(item.GetDynamicItemInfo());
    filecreator.CreateFile(filename);

    IDfsFile file = filecreator.GetFile();
    // Times are seconds relative to the first time stamp.
    for (int j = 0; j < dfsDate.Count; j++)
    {
        file.WriteItemTimeStepNext((dfsDate[j] - dfsDate[0]).TotalSeconds, new float[] { dfsData[j, columnIndex] });
    }
    file.Close();
}
/// <summary>
/// Writes a single dfs0 boundary file for the station currently selected
/// in the combo box: water level ("WL…") or discharge ("Q,…") depending on
/// the selected name's prefix.
/// </summary>
private void btnSingleDFS0_Click(object sender, EventArgs e)
{
    try
    {
        if (fileName == "")
        {
            MessageBox.Show("No files have been selected for processing...\nPlease Load a file first.");
            return;
        }

        string selected = comboBox1.SelectedItem.ToString();
        string itemType = selected.Substring(0, 2);
        if (itemType == "WL")
        {
            // For water levels the station name drops the last 4 characters.
            _WriteSelectedBoundaryDfs0(selected.Substring(0, selected.Length - 4),
                comboBox1.SelectedIndex, 2012, eumItem.eumIWaterLevel, eumUnit.eumUmeter);
        }
        else if (itemType == "Q,")
        {
            _WriteSelectedBoundaryDfs0(selected,
                comboBox1.SelectedIndex, 2014, eumItem.eumIDischarge, eumUnit.eumUm3PerSec);
        }
        // Typo fix: was "suceesssfully". As in the original, the message is
        // shown even when the prefix matched neither branch.
        MessageBox.Show("Result file processed successfully.");
    }
    catch (Exception error)
    {
        MessageBox.Show("HD Model Result files cannot be processed due to an error. Error: " + error.Message);
    }
}

/// <summary>
/// Writes one non-equidistant calendar-axis dfs0 file for the selected
/// station, using column <paramref name="columnIndex"/> of <c>dfsData</c>
/// and the loaded <c>dfsDate</c> time stamps. Extracted to remove the
/// duplicated WL/Q builder code.
/// </summary>
/// <param name="element">Station name; used for file name, titles and item name</param>
/// <param name="columnIndex">Column of dfsData holding this station's values</param>
/// <param name="builderYear">DFS builder version year (2012 for WL, 2014 for Q)</param>
/// <param name="quantityItem">EUM item type of the dynamic item</param>
/// <param name="quantityUnit">EUM unit of the dynamic item</param>
private void _WriteSelectedBoundaryDfs0(string element, int columnIndex, int builderYear, eumItem quantityItem, eumUnit quantityUnit)
{
    DfsFactory factory = new DfsFactory();
    string filename = dfs0Path + @"\" + element + ".dfs0";
    DfsBuilder filecreator = DfsBuilder.Create(element, element, builderYear);
    filecreator.SetDataType(1);
    filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
    filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec,
        new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day,
                     dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
    filecreator.SetItemStatisticsType(StatType.RegularStat);

    DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
    item.Set(element, eumQuantity.Create(quantityItem, quantityUnit), DfsSimpleType.Float);
    item.SetValueType(DataValueType.Instantaneous);
    item.SetAxis(factory.CreateAxisEqD0());
    item.SetReferenceCoordinates(1f, 2f, 3f);
    filecreator.AddDynamicItem(item.GetDynamicItemInfo());
    filecreator.CreateFile(filename);

    IDfsFile file = filecreator.GetFile();
    // Times are seconds relative to the first time stamp.
    for (int j = 0; j < dfsDate.Count; j++)
    {
        file.WriteItemTimeStepNext((dfsDate[j] - dfsDate[0]).TotalSeconds, new float[] { dfsData[j, columnIndex] });
    }
    file.Close();
}
/// <summary>
/// Lets the user pick a MIKE NAM RES11 result file, records its time stamps
/// into <c>dfsDate</c>, and exports every "RunOff…" item to its own dfs0 file
/// next to the result file.
/// </summary>
private void btnLoadNAM_Click(object sender, EventArgs e)
{
    OpenFileDialog dialog = new OpenFileDialog();
    dialog.Filter = "Mike NAM Result Files|*.RES11";
    if (dialog.ShowDialog() == System.Windows.Forms.DialogResult.Cancel)
    {
        // Bug fix: previously processing continued with a stale/empty
        // fileName even when the dialog was cancelled.
        return;
    }
    fileName = dialog.FileName;
    // Equivalent to the original manual split/join of the path components.
    dfs0Path = Path.GetDirectoryName(fileName);

    // Open read-only: the result file is only read, never modified.
    IDfsFile resFile = DfsFileFactory.DfsGenericOpen(fileName);
    try
    {
        IDfsFileInfo resfileInfo = resFile.FileInfo;
        DateTime[] date = resFile.FileInfo.TimeAxis.GetDateTimes();
        DateTime startDate = date[0];
        int noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
        float[] values = new float[noTimeSteps];

        // NOTE(review): Time is interpreted as hours since start here —
        // confirm that matches the RES11 file's time axis unit.
        for (int i = 0; i < noTimeSteps; i++)
        {
            dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
        }

        DfsFactory factory = new DfsFactory();
        for (int j = 0; j < resFile.ItemInfo.Count; j++)
        {
            IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
            string nameOftDynamicItem = dynamicItemInfo.Name;
            // StartsWith replaces Substring(0, 6), which threw on item
            // names shorter than 6 characters.
            if (!nameOftDynamicItem.StartsWith("RunOff"))
            {
                continue;
            }

            string filename = dfs0Path + @"\" + nameOftDynamicItem + ".dfs0";
            DfsBuilder filecreator = DfsBuilder.Create(nameOftDynamicItem, nameOftDynamicItem, 2014);
            filecreator.SetDataType(1);
            filecreator.SetGeographicalProjection(factory.CreateProjectionUndefined());
            filecreator.SetTemporalAxis(factory.CreateTemporalNonEqCalendarAxis(eumUnit.eumUsec,
                new DateTime(dfsDate[0].Year, dfsDate[0].Month, dfsDate[0].Day,
                             dfsDate[0].Hour, dfsDate[0].Minute, dfsDate[0].Second)));
            filecreator.SetItemStatisticsType(StatType.RegularStat);

            DfsDynamicItemBuilder item = filecreator.CreateDynamicItemBuilder();
            item.Set(nameOftDynamicItem, eumQuantity.Create(eumItem.eumIDischarge, eumUnit.eumUm3PerSec), DfsSimpleType.Float);
            item.SetValueType(DataValueType.Instantaneous);
            item.SetAxis(factory.CreateAxisEqD0());
            item.SetReferenceCoordinates(1f, 2f, 3f);
            filecreator.AddDynamicItem(item.GetDynamicItemInfo());
            filecreator.CreateFile(filename);

            IDfsFile file = filecreator.GetFile();
            for (int i = 0; i < noTimeSteps; i++)
            {
                var data = (IDfsItemData<float>)resFile.ReadItemTimeStep(j + 1, i);
                values[i] = Convert.ToSingle(data.Data[0]);
                file.WriteItemTimeStepNext((dfsDate[i] - dfsDate[0]).TotalSeconds, new float[] { values[i] });
            }
            file.Close();
        }
    }
    finally
    {
        // Was missing in the original: always release the result file handle.
        resFile.Close();
    }
}
/// <summary> /// Example of how to merge two or more dfs files. The merger is on dynamic item basis, /// i.e. add all dynamic items of a number of dfs files to a new dfs file. /// <para> /// It is assumed that all files has the same time stepping layout. It will merge /// as many time steps as the file with the least number of timesteps. /// </para> /// <para> /// If merging one of the specific types of dfs files, dfs0 or dfs1 or dfs2 or dfs3, /// the structure of the files must be identical, i.e. the sizes of the axis must equal. /// Otherwise, the outcome will not be a valid dfs0/1/2/3 file. /// </para> /// </summary> /// <param name="targetFilename">Path and name of the new file to create</param> /// <param name="sourcesFilenames">Path and name of the source dfs files</param>
public static void MergeDfsFileItems(string targetFilename, IList <string> sourcesFilenames)
{
  // List of sources to be merged - in case of more than one, just extend this.
  List <IDfsFile> sources = new List <IDfsFile>();
  for (int i = 0; i < sourcesFilenames.Count; i++)
  {
    sources.Add(DfsFileFactory.DfsGenericOpen(sourcesFilenames[i]));
  }

  // Use the first file as skeleton for header and static items.
  IDfsFile source = sources[0];
  IDfsFileInfo fileInfo = source.FileInfo;
  DfsBuilder builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

  // Set up the header (copied field by field from the first source).
  builder.SetDataType(fileInfo.DataType);
  builder.SetGeographicalProjection(fileInfo.Projection);
  builder.SetTemporalAxis(fileInfo.TimeAxis);
  builder.SetItemStatisticsType(fileInfo.StatsType);
  builder.DeleteValueByte = fileInfo.DeleteValueByte;
  builder.DeleteValueDouble = fileInfo.DeleteValueDouble;
  builder.DeleteValueFloat = fileInfo.DeleteValueFloat;
  builder.DeleteValueInt = fileInfo.DeleteValueInt;
  builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

  // Transfer compression keys - if any.
  if (fileInfo.IsFileCompressed)
  {
    int[] xkey;
    int[] ykey;
    int[] zkey;
    fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
    builder.SetEncodingKey(xkey, ykey, zkey);
  }

  // Copy custom blocks - if any
  foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
  {
    builder.AddCustomBlock(customBlock);
  }

  // Only as many time steps as the shortest source can be merged.
  int minNumTimesteps = int.MaxValue;

  // Copy dynamic items for all source files
  for (int j = 0; j < sources.Count; j++)
  {
    if (sources[j].FileInfo.TimeAxis.NumberOfTimeSteps < minNumTimesteps)
    {
      minNumTimesteps = sources[j].FileInfo.TimeAxis.NumberOfTimeSteps;
    }
    foreach (var itemInfo in sources[j].ItemInfo)
    {
      builder.AddDynamicItem(itemInfo);
    }
  }

  // Create file
  builder.CreateFile(targetFilename);

  // Copy static items - add only from main file
  IDfsStaticItem sourceStaticItem;
  while (null != (sourceStaticItem = source.ReadStaticItemNext()))
  {
    builder.AddStaticItem(sourceStaticItem);
  }

  // Get the file
  DfsFile file = builder.GetFile();

  // Copy dynamic item data. The write order must match the item order the
  // builder was given above: per time step, all items of source 0, then all
  // items of source 1, etc. Each source's sequential reader advances one
  // time step per outer iteration.
  IDfsItemData sourceData;
  for (int i = 0; i < minNumTimesteps; i++)
  {
    for (int j = 0; j < sources.Count; j++)
    {
      IDfsFile sourcej = sources[j];
      // Copy all items for this source
      for (int k = 0; k < sourcej.ItemInfo.Count; k++)
      {
        sourceData = sourcej.ReadItemTimeStepNext();
        file.WriteItemTimeStepNext(sourceData.Time, sourceData.Data);
      }
    }
  }

  foreach (IDfsFile sourcej in sources)
  {
    sourcej.Close();
  }
  file.Close();
}
/// <summary> /// Disposes the res11-file /// </summary>
public void Dispose()
{
  // Closing releases the underlying DFS file handle held by this instance.
  df.Close();
}
/// <summary>
/// Creates a dfs0 file with an equidistant time axis and two dynamic items
/// (water level and water depth), writing all data in one bulk call.
/// <para>
/// It uses the generic <see cref="DfsBuilder"/>, since currently no specialized
/// builder exists for the dfs0 files.
/// </para>
/// </summary>
/// <param name="filename">Name of new file</param>
/// <param name="calendarAxis">boolean specifying whether the temporal axis should be a calendar axis or a time axis</param>
public static void CreateDfs0FileFromArray(string filename, bool calendarAxis)
{
    DfsFactory factory = new DfsFactory();
    DfsBuilder builder = DfsBuilder.Create("TemporalAxisTest", "dfs Timeseries Bridge", 10000);

    // File header
    builder.SetDataType(1);
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined());
    if (calendarAxis)
    {
        builder.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, new DateTime(2010, 01, 04, 12, 34, 00), 4, 10));
    }
    else
    {
        builder.SetTemporalAxis(factory.CreateTemporalEqTimeAxis(eumUnit.eumUsec, 3, 10));
    }
    builder.SetItemStatisticsType(StatType.RegularStat);

    // Two scalar float items, identical except for name and quantity.
    var itemSpecs = new[]
    {
        new { Name = "WaterLevel item", Quantity = eumItem.eumIWaterLevel },
        new { Name = "WaterDepth item", Quantity = eumItem.eumIWaterDepth },
    };
    foreach (var spec in itemSpecs)
    {
        DfsDynamicItemBuilder dib = builder.CreateDynamicItemBuilder();
        dib.Set(spec.Name, eumQuantity.Create(spec.Quantity, eumUnit.eumUmeter), DfsSimpleType.Float);
        dib.SetValueType(DataValueType.Instantaneous);
        dib.SetAxis(factory.CreateAxisEqD0());
        dib.SetReferenceCoordinates(1f, 2f, 3f);
        builder.AddDynamicItem(dib.GetDynamicItemInfo());
    }

    // Create file
    builder.CreateFile(filename);
    IDfsFile file = builder.GetFile();

    // Water levels per time step; the depth is always the level plus 100.
    double[] levels = { 0, 1, 2, 3, 4, 5, 10, 11, 12, 13 };

    // Times are ignored for an equidistant axis, so zeros are fine.
    double[] times = new double[10];
    double[,] values = new double[10, 2];
    for (int i = 0; i < levels.Length; i++)
    {
        values[i, 0] = levels[i];       // water level
        values[i, 1] = levels[i] + 100; // water depth
    }

    // Bulk-write the entire matrix in one call.
    DHI.Generic.MikeZero.DFS.dfs0.Dfs0Util.WriteDfs0DataDouble(file, times, values);

    file.Close();
}
/// <summary>
/// Creates a dfs0 file with an equidistant time axis and two dynamic items
/// (water level and water depth), writing the data step by step.
/// <para>
/// It uses the generic <see cref="DfsBuilder"/>, since currently no specialized
/// builder exists for the dfs0 files.
/// </para>
/// </summary>
/// <param name="filename">Name of new file</param>
/// <param name="calendarAxis">boolean specifying whether the temporal axis should be a calendar axis or a time axis</param>
public static void CreateDfs0File(string filename, bool calendarAxis)
{
    DfsFactory factory = new DfsFactory();
    DfsBuilder builder = DfsBuilder.Create("TemporalAxisTest", "dfs Timeseries Bridge", 10000);

    // File header
    builder.SetDataType(1);
    builder.SetGeographicalProjection(factory.CreateProjectionUndefined());
    if (calendarAxis)
    {
        builder.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, new DateTime(2010, 01, 04, 12, 34, 00), 4, 10));
    }
    else
    {
        builder.SetTemporalAxis(factory.CreateTemporalEqTimeAxis(eumUnit.eumUsec, 3, 10));
    }
    builder.SetItemStatisticsType(StatType.RegularStat);

    // Two scalar float items, identical except for name and quantity.
    var itemSpecs = new[]
    {
        new { Name = "WaterLevel item", Quantity = eumItem.eumIWaterLevel },
        new { Name = "WaterDepth item", Quantity = eumItem.eumIWaterDepth },
    };
    foreach (var spec in itemSpecs)
    {
        DfsDynamicItemBuilder dib = builder.CreateDynamicItemBuilder();
        dib.Set(spec.Name, eumQuantity.Create(spec.Quantity, eumUnit.eumUmeter), DfsSimpleType.Float);
        dib.SetValueType(DataValueType.Instantaneous);
        dib.SetAxis(factory.CreateAxisEqD0());
        dib.SetReferenceCoordinates(1f, 2f, 3f);
        builder.AddDynamicItem(dib.GetDynamicItemInfo());
    }

    // Create file
    builder.CreateFile(filename);
    IDfsFile file = builder.GetFile();

    // Water levels per time step; the depth is always the level plus 100.
    // Per time step, items are written in order: level first, then depth.
    // The time argument (0) is ignored for an equidistant axis.
    float[] levels = { 0f, 1f, 2f, 3f, 4f, 5f, 10f, 11f, 12f, 13f };
    foreach (float level in levels)
    {
        file.WriteItemTimeStepNext(0, new float[] { level });        // water level
        file.WriteItemTimeStepNext(0, new float[] { level + 100f }); // water depth
    }

    file.Close();
}
/// <summary>
/// Create a new file, being the difference of two files.
/// <para>
/// The two input files must be equal in structure, e.g. coming
/// from the same simulation but giving different results.
/// Header and static data must be identical, only difference
/// must be in values of the dynamic data.
/// </para>
/// <para>
/// If <paramref name="filediff"/> is null or empty, no output file is written;
/// only the difference statistics are computed and printed to the console.
/// </para>
/// </summary>
/// <param name="file1">Path and name of the first input file</param>
/// <param name="file2">Path and name of the second input file</param>
/// <param name="filediff">Path and name of the difference file to create; null/empty for statistics only</param>
public static void CreateDiffFile(string file1, string file2, string filediff = null)
{
    IDfsFile dfs1 = DfsFileFactory.DfsGenericOpen(file1);
    IDfsFile dfs2 = DfsFileFactory.DfsGenericOpen(file2);

    // Validate that it has the same number of items.
    if (dfs1.ItemInfo.Count != dfs2.ItemInfo.Count)
    {
        throw new Exception("Number of dynamic items does not match");
    }
    int numItems = dfs1.ItemInfo.Count;

    // In case number of time steps does not match, take the smallest.
    int numTimes = dfs1.FileInfo.TimeAxis.NumberOfTimeSteps;
    if (numTimes > dfs2.FileInfo.TimeAxis.NumberOfTimeSteps)
    {
        numTimes = dfs2.FileInfo.TimeAxis.NumberOfTimeSteps;
        Console.Out.WriteLine("Number of time steps does not match, using the smallest number");
    }

    // For recording max difference for every item
    double[] maxDiff = new double[dfs1.ItemInfo.Count];
    // Index in time (index) of maximum and first difference. -1 if no difference
    int[] maxDiffTime = new int[dfs1.ItemInfo.Count];
    int[] firstDiffTime = new int[dfs1.ItemInfo.Count];
    for (int i = 0; i < dfs1.ItemInfo.Count; i++)
    {
        maxDiffTime[i] = -1;
        firstDiffTime[i] = -1;
    }

    // Copy over info from the first file, assuming the second file contains the same data.
    IDfsFileInfo fileInfo = dfs1.FileInfo;
    // builder stays null in "statistics only" mode; all writes below are guarded by ?.
    DfsBuilder builder = null;
    if (!string.IsNullOrEmpty(filediff))
    {
        builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

        // Set up the header
        builder.SetDataType(fileInfo.DataType);
        builder.SetGeographicalProjection(fileInfo.Projection);
        builder.SetTemporalAxis(fileInfo.TimeAxis);
        builder.SetItemStatisticsType(fileInfo.StatsType);
        builder.DeleteValueByte = fileInfo.DeleteValueByte;
        builder.DeleteValueDouble = fileInfo.DeleteValueDouble;
        builder.DeleteValueFloat = fileInfo.DeleteValueFloat;
        builder.DeleteValueInt = fileInfo.DeleteValueInt;
        builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

        // Transfer compression keys.
        if (fileInfo.IsFileCompressed)
        {
            int[] xkey;
            int[] ykey;
            int[] zkey;
            fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
            builder.SetEncodingKey(xkey, ykey, zkey);
        }

        // Copy custom blocks
        foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
        {
            builder.AddCustomBlock(customBlock);
        }
    }

    // Copy dynamic item definitions, remembering which items are floats (vs. doubles)
    bool[] floatItems = new bool[dfs1.ItemInfo.Count];
    for (int i = 0; i < dfs1.ItemInfo.Count; i++)
    {
        var itemInfo = dfs1.ItemInfo[i];

        // Validate item sizes
        var itemInfo2 = dfs2.ItemInfo[i];
        if (itemInfo.ElementCount != itemInfo2.ElementCount)
        {
            throw new Exception("Dynamic items must have same size, item number " + (i + 1) + " has different sizes in the two files");
        }
        // Validate the data type, only supporting floats and doubles.
        if (itemInfo.DataType == DfsSimpleType.Float)
        {
            floatItems[i] = true;
        }
        else if (itemInfo.DataType != DfsSimpleType.Double)
        {
            throw new Exception("Dynamic item must be double or float, item number " + (i + 1) + " is of type " + (itemInfo.DataType));
        }
        builder?.AddDynamicItem(itemInfo);
    }

    // Create file
    builder?.CreateFile(filediff);

    if (builder != null)
    {
        // Copy over static items from file 1, assuming the static items of file 2 are identical
        IDfsStaticItem si1;
        while (null != (si1 = dfs1.ReadStaticItemNext()))
        {
            builder.AddStaticItem(si1);
        }
    }

    // Get the file
    DfsFile diff = builder?.GetFile();

    // Write dynamic data to the file, being the difference between the two.
    // Items are read in file order: one time step at a time, item by item.
    for (int i = 0; i < numTimes; i++)
    {
        for (int j = 0; j < numItems; j++)
        {
            if (floatItems[j])
            {
                IDfsItemData<float> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData<float>;
                IDfsItemData<float> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData<float>;
                // The difference is computed in place in data1's buffer before writing.
                for (int k = 0; k < data1.Data.Length; k++)
                {
                    float valuediff = data1.Data[k] - data2.Data[k];
                    data1.Data[k] = valuediff;
                    float absValueDiff = Math.Abs(valuediff);
                    // maxDiff[j] starts at 0, so the first nonzero difference enters this
                    // branch and records firstDiffTime as well as the running maximum.
                    if (absValueDiff > maxDiff[j])
                    {
                        maxDiff[j] = absValueDiff;
                        maxDiffTime[j] = i;
                        if (firstDiffTime[j] == -1)
                        {
                            firstDiffTime[j] = i;
                        }
                    }
                }
                diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
            }
            else
            {
                // Same as the float branch, but for double-typed items.
                IDfsItemData<double> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData<double>;
                IDfsItemData<double> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData<double>;
                for (int k = 0; k < data1.Data.Length; k++)
                {
                    double valuediff = data1.Data[k] - data2.Data[k];
                    data1.Data[k] = valuediff;
                    double absValueDiff = Math.Abs(valuediff);
                    if (absValueDiff > maxDiff[j])
                    {
                        maxDiff[j] = absValueDiff;
                        maxDiffTime[j] = i;
                        if (firstDiffTime[j] == -1)
                        {
                            firstDiffTime[j] = i;
                        }
                    }
                }
                diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
            }
        }
    }

    // Report per-item statistics: maximum difference and where it first occurred.
    System.Console.WriteLine("Difference statistics:");
    for (int i = 0; i < maxDiffTime.Length; i++)
    {
        if (maxDiffTime[i] < 0)
        {
            Console.WriteLine("{0,-30}: no difference", dfs1.ItemInfo[i].Name);
        }
        else
        {
            Console.WriteLine("{0,-30}: Max difference at timestep {1,3}: {2}. First difference at timestep {3}", dfs1.ItemInfo[i].Name, maxDiffTime[i], maxDiff[i], firstDiffTime[i]);
        }
    }

    dfs1.Close();
    dfs2.Close();
    diff?.Close();
}
/// <summary>
/// Example of how to copy a Dfs file.
/// <para>
/// This example is intended to show how to generically copy a file. In
/// case a copy with modified data is required, this could be used as a base
/// for the copy: header, custom blocks, compression keys, dynamic item
/// definitions, static items and dynamic data are all transferred verbatim.
/// </para>
/// </summary>
/// <param name="sourceFilename">Path and name of the source dfs file</param>
/// <param name="filename">Path and name of the new file to create</param>
public static void CopyDfsFile(string sourceFilename, string filename)
{
    IDfsFile input = DfsFileFactory.DfsGenericOpen(sourceFilename);
    IDfsFileInfo info = input.FileInfo;

    DfsBuilder copyBuilder = DfsBuilder.Create(info.FileTitle, info.ApplicationTitle, info.ApplicationVersion);

    // Replicate the header of the source file.
    copyBuilder.SetDataType(info.DataType);
    copyBuilder.SetGeographicalProjection(info.Projection);
    copyBuilder.SetTemporalAxis(info.TimeAxis);
    copyBuilder.SetItemStatisticsType(info.StatsType);
    copyBuilder.DeleteValueByte = info.DeleteValueByte;
    copyBuilder.DeleteValueDouble = info.DeleteValueDouble;
    copyBuilder.DeleteValueFloat = info.DeleteValueFloat;
    copyBuilder.DeleteValueInt = info.DeleteValueInt;
    copyBuilder.DeleteValueUnsignedInt = info.DeleteValueUnsignedInt;

    // Transfer compression keys - if any.
    if (info.IsFileCompressed)
    {
        int[] xkey;
        int[] ykey;
        int[] zkey;
        info.GetEncodeKey(out xkey, out ykey, out zkey);
        copyBuilder.SetEncodingKey(xkey, ykey, zkey);
    }

    // Copy custom blocks - if any.
    foreach (IDfsCustomBlock customBlock in info.CustomBlocks)
    {
        copyBuilder.AddCustomBlock(customBlock);
    }

    // Copy dynamic item definitions.
    foreach (var dynamicItem in input.ItemInfo)
    {
        copyBuilder.AddDynamicItem(dynamicItem);
    }

    // Create file
    copyBuilder.CreateFile(filename);

    // Copy static items - must happen after CreateFile and before GetFile.
    IDfsStaticItem staticItem;
    while ((staticItem = input.ReadStaticItemNext()) != null)
    {
        copyBuilder.AddStaticItem(staticItem);
    }

    // Get the file
    DfsFile output = copyBuilder.GetFile();

    // Copy dynamic item data, time step by time step, item by item.
    IDfsItemData timeStepData;
    while ((timeStepData = input.ReadItemTimeStepNext()) != null)
    {
        output.WriteItemTimeStepNext(timeStepData.Time, timeStepData.Data);
    }

    input.Close();
    output.Close();
}
/// <summary>
/// Generates a boundary-condition dfs file for WebTide nodes from an existing
/// template file (water level/depth as a single item, or u/v current velocities
/// as an item pair), using simulation timing read from the PFS input file.
/// NOTE(review): every path of this method returns false - including the paths
/// that successfully write and close the output file. Presumably the success
/// paths should return true; verify against callers.
/// </summary>
public bool GenerateWebTideNode(TVFileModel TVFileModelBC, int WebTideNodeNumb, List<Coord> CoordList, List<TVFileModel> tvFileModelList, int BoundaryConditionCodeNumber, List<List<WaterLevelResult>> AllWLResults, List<IEnumerable<CurrentResult>> AllCurrentResults)
{
    List<eumItem> eumItemList = new List<eumItem>();
    DfsFactory factory = new DfsFactory();
    // Open the existing boundary-condition file as the template for the new file.
    IDfsFile dfsOldFile = DfsFileFactory.DfsGenericOpen(TVFileModelBC.ServerFilePath + TVFileModelBC.ServerFileName);
    DfsBuilder dfsNewFile = DfsBuilder.Create(dfsOldFile.FileInfo.FileTitle, dfsOldFile.FileInfo.ApplicationTitle, dfsOldFile.FileInfo.ApplicationVersion);
    // Time step of the template file, converted from seconds to minutes.
    // Assumes the template has an equidistant calendar axis - the cast throws otherwise.
    double WebTideStepsInMinutes = ((double)((IDfsEqCalendarAxis)((dfsOldFile.FileInfo).TimeAxis)).TimeStep / 60);
    DateTime? dateTimeTemp = null;
    int? NumberOfTimeSteps = null;
    int? TimeStepInterval = null;
    // Read simulation timing from the PFS input file; bail out (closing the
    // template) if any of the three values is missing.
    using (PFS pfs = new PFS(base.fi))
    {
        dateTimeTemp = pfs.GetVariableDateTime("FemEngineHD/TIME", "start_time");
        if (dateTimeTemp == null)
        {
            dfsOldFile.Close();
            return (false);
        }
        NumberOfTimeSteps = pfs.GetVariable<int>("FemEngineHD/TIME", "number_of_time_steps", 1);
        if (NumberOfTimeSteps == null)
        {
            dfsOldFile.Close();
            return (false);
        }
        TimeStepInterval = pfs.GetVariable<int>("FemEngineHD/TIME", "time_step_interval", 1);
        if (TimeStepInterval == null)
        {
            dfsOldFile.Close();
            return (false);
        }
    }
    // Pad the simulation window by one hour on each side.
    DateTime StartDate = ((DateTime)dateTimeTemp).AddHours(-1);
    // NOTE(review): EndDate is computed but never used below - TODO confirm intent.
    DateTime EndDate = ((DateTime)dateTimeTemp).AddSeconds((int)NumberOfTimeSteps * (int)TimeStepInterval).AddHours(1);
    // New file header: same data type/projection as the template, equidistant
    // calendar axis starting at the padded start date, step in seconds.
    dfsNewFile.SetDataType(dfsOldFile.FileInfo.DataType);
    dfsNewFile.SetGeographicalProjection(dfsOldFile.FileInfo.Projection);
    dfsNewFile.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, StartDate, 0, WebTideStepsInMinutes * 60));
    dfsNewFile.SetItemStatisticsType(StatType.RegularStat);
    // Recreate each dynamic item of the template, but on a 1D axis with one
    // element per WebTide node; record the EUM item types for the checks below.
    foreach (IDfsDynamicItemInfo di in dfsOldFile.ItemInfo)
    {
        DfsDynamicItemBuilder ddib = dfsNewFile.CreateDynamicItemBuilder();
        ddib.Set(di.Name, eumQuantity.Create(di.Quantity.Item, di.Quantity.Unit), di.DataType);
        ddib.SetValueType(di.ValueType);
        ddib.SetAxis(factory.CreateAxisEqD1(eumUnit.eumUsec, CoordList.Count, 0, 1));
        ddib.SetReferenceCoordinates(di.ReferenceCoordinateX, di.ReferenceCoordinateY, di.ReferenceCoordinateZ);
        dfsNewFile.AddDynamicItem(ddib.GetDynamicItemInfo());
        eumItemList.Add(di.Quantity.Item);
    }
    dfsOldFile.Close();
    string[] NewFileErrors = dfsNewFile.Validate();
    // NOTE(review): sbErr collects the validation errors but is never used
    // afterwards - presumably it was meant to go into the error message.
    StringBuilder sbErr = new StringBuilder();
    foreach (string s in NewFileErrors)
    {
        sbErr.AppendLine(s);
    }
    if (NewFileErrors.Count() > 0)
    {
        ErrorMessage = string.Format(CSSPDHIRes.CouldNotCreate_, TVFileModelBC.ServerFileName.Replace(".dfs0", "dfs1"));
        OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
        return (false);
    }
    string NewFileNameBC = TVFileModelBC.ServerFileName;
    if (CoordList.Count == 0)
    {
        ErrorMessage = CSSPDHIRes.NumberOfWebTideNodesIsZero;
        OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
        return (false);
    }
    if (eumItemList.Count == 1)
    {
        // Single-item template: must be water level or water depth.
        if (eumItemList[0] == eumItem.eumIWaterLevel || eumItemList[0] == eumItem.eumIWaterDepth)
        {
            // NOTE(review): WLResults is null and is dereferenced in the loop
            // condition below, so this branch throws NullReferenceException as
            // written - presumably it should be loaded from AllWLResults. Verify.
            List<WaterLevelResult> WLResults = null;
            dfsNewFile.CreateFile(TVFileModelBC.ServerFilePath + NewFileNameBC);
            IDfsFile file = dfsNewFile.GetFile();
            for (int i = 0; i < WLResults.ToList().Count; i++)
            {
                // One value per WebTide node for this time step.
                float[] floatArray = new float[AllWLResults.Count];
                for (int j = 0; j < AllWLResults.Count; j++)
                {
                    floatArray[j] = ((float)((List<WaterLevelResult>)AllWLResults[j].ToList())[i].WaterLevel);
                }
                file.WriteItemTimeStepNext(0, floatArray); // water level array
            }
            file.Close();
        }
        else
        {
            ErrorMessage = string.Format(CSSPDHIRes.FileContainsOneParamButItsNotOfTypeWLOrWDItIs_, eumItemList[0].ToString());
            OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
            return (false);
        }
    }
    else if (eumItemList.Count == 2)
    {
        // Two-item template: must be u-velocity followed by v-velocity.
        if (eumItemList[0] == eumItem.eumIuVelocity && eumItemList[1] == eumItem.eumIvVelocity)
        {
            // read web tide for the required time
            // NOTE(review): CurrentResults is null and dereferenced in the loop
            // condition, mirroring the water-level bug above - verify.
            List<CurrentResult> CurrentResults = null;
            dfsNewFile.CreateFile(TVFileModelBC.ServerFilePath + NewFileNameBC);
            IDfsFile file = dfsNewFile.GetFile();
            for (int i = 0; i < CurrentResults.ToList().Count; i++)
            {
                float[] floatArrayX = new float[AllCurrentResults.Count];
                float[] floatArrayY = new float[AllCurrentResults.Count];
                for (int j = 0; j < AllCurrentResults.Count; j++)
                {
                    floatArrayX[j] = ((float)((List<CurrentResult>)AllCurrentResults[j].ToList())[i].x_velocity);
                    floatArrayY[j] = ((float)((List<CurrentResult>)AllCurrentResults[j].ToList())[i].y_velocity);
                }
                file.WriteItemTimeStepNext(0, floatArrayX); // Current xVelocity
                file.WriteItemTimeStepNext(0, floatArrayY); // Current yVelocity
            }
            file.Close();
        }
        else
        {
            ErrorMessage = string.Format(CSSPDHIRes.FileContains2ParamButItsNotOfUVAndVVItIs_And_, eumItemList[0].ToString(), eumItemList[1].ToString());
            OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
            return (false);
        }
    }
    else
    {
        // this is not a file that is used for Water Level or Currents
    }
    return (false);
}