Example #1
        public void Run(string inputfile, double fac, double constant, string outputfile)
        {
            if (!File.Exists(inputfile))
            {
                throw new Exception(String.Format("Input file {0} does not exist!", inputfile));
            }

            var ext1 = Path.GetExtension(inputfile).ToLower();
            var ext2 = Path.GetExtension(outputfile).ToLower();

            if (ext1 != ext2)
            {
                throw new Exception("Input and output files must have same extension!");
            }

            try
            {
                _dfsInput  = DfsFileFactory.DfsGenericOpen(inputfile);
                _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput, outputfile);

                ProcessAllTimeSteps(_dfsOutput, (float)fac, (float)constant);
            }
            finally
            {
                _dfsInput.Close();
                _dfsOutput.Close();
            }
        }
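A minimal call sketch for the method above. The surrounding class and its ProcessAllTimeSteps helper are not shown in this example, so the type name below is purely hypothetical.

            // Hypothetical host class; scales every value by 2.0 and then adds 0.5
            var tool = new DfsScaleAddTool();
            tool.Run(@"input.dfs2", 2.0, 0.5, @"output.dfs2");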
Example #2
        public void Run(string inputfile1, string inputfile2, double fac1, double fac2, string outputfile)
        {
            if (!File.Exists(inputfile1))
            {
                throw new Exception(String.Format("First input file {0} does not exist!", inputfile1));
            }
            if (!File.Exists(inputfile2))
            {
                throw new Exception(String.Format("Second input file {0} does not exist!", inputfile1));
            }

            try
            {
                _dfsInput1 = DfsFileFactory.DfsGenericOpen(inputfile1);
                _dfsInput2 = DfsFileFactory.DfsGenericOpen(inputfile2);
                _VerifyInputSimilarity(_dfsInput1, _dfsInput2);
                _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput1, outputfile);

                ProcessAllTimeSteps(_dfsOutput, (float)fac1, (float)fac2);
            }
            finally
            {
                _dfsInput1.Close();
                _dfsInput2.Close();
                _dfsOutput.Close();
            }
        }
Example #3
        /// <summary>
        /// Example of how to append data from one file to another. It is assumed that:
        /// <list type="bullet">
        /// <item>The files have identical dynamic and static items</item>
        /// <item>The last time step of the target file equals the first
        ///       time step of the source file, and therefore the first time step
        ///       from the source file is not added to the target file</item>
        /// </list>
        /// <para>
        /// This example uses the generic DFS functionality, and will work for any type
        /// of DFS file.
        /// </para>
        /// </summary>
        public static void AppendToFile(string targetFile, string sourceFile)
        {
            // Open target for appending and source for reading
            IDfsFile target = DfsFileFactory.DfsGenericOpenAppend(targetFile);
            IDfsFile source = DfsFileFactory.DfsGenericOpen(sourceFile);

            // Time of last time step of file, in the time unit of the time axis.
            // This is sufficient as long as TimeAxis.StartTimeOffset is the same in the
            // source and target files (it is zero for most files)
            double targetEndTime = target.FileInfo.TimeAxis.TimeSpan();

            // Do not add initial time step 0 of source to target file,
            // so go directly to time step 1 in source
            source.FindTimeStep(1);

            // Copy over data
            IDfsItemData sourceData2;

            while (null != (sourceData2 = source.ReadItemTimeStepNext()))
            {
                target.WriteItemTimeStepNext(targetEndTime + sourceData2.Time, sourceData2.Data);
            }

            // Close the files
            target.Close();
            source.Close();
        }
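A minimal usage sketch for AppendToFile (file names are hypothetical; the two files must satisfy the assumptions listed in the summary above).

            // Appends the source data, minus its first time step, to the end of the target file
            AppendToFile(@"simulation_part1.dfs0", @"simulation_part2.dfs0");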
Example #4
        /// <summary>
        /// Update time series with a constant change factor, adding 10% to all values
        /// </summary>
        /// <param name="dfs0File">Path and name of file, e.g. Rain_instantaneous.dfs0 test file</param>
        /// <param name="dfs0FileNew">Name of new updated file</param>
        public static void UpdateDfs0Data(string dfs0File, string dfs0FileNew)
        {
            // Open source file
            IDfsFile source = DfsFileFactory.DfsGenericOpen(dfs0File);

            // Create a new file with updated rain values
            DfsBuilder builder = DfsBuilder.Create(source.FileInfo.FileTitle + "Updated", "MIKE SDK", 13);

            // Copy header info from source file to new file
            builder.SetDataType(source.FileInfo.DataType);
            builder.SetGeographicalProjection(source.FileInfo.Projection);
            builder.SetTemporalAxis(source.FileInfo.TimeAxis);

            // Copy over first item from source file to new file
            builder.AddDynamicItem(source.ItemInfo[0]);

            // Create the new file
            builder.CreateFile(dfs0FileNew);
            IDfsFile target = builder.GetFile();

            // Loop over all timesteps
            for (int i = 0; i < source.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
            {
                // Read time step for item, and extract value
                IDfsItemData <double> itemData = (IDfsItemData <double>)source.ReadItemTimeStep(1, i);
                double value = itemData.Data[0];
                // Write new value to target, adding 10% to its value
                target.WriteItemTimeStepNext(itemData.Time, new double[] { value * 1.1 });
            }

            source.Close();
            target.Close();
        }
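A usage sketch, assuming the Rain_instantaneous.dfs0 test file mentioned in the summary is available locally (both paths are hypothetical).

            // Writes a copy of the first item with all values increased by 10%
            UpdateDfs0Data(@"Rain_instantaneous.dfs0", @"Rain_instantaneous_updated.dfs0");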
Example #5
        public static DfsFile CreateFromTemplate(string dfsTemplate, string outputfile)
        {
            var iDfsTemplate = DfsFileFactory.DfsGenericOpen(dfsTemplate);
            var outputDfs    = CreateFromTemplate(iDfsTemplate, outputfile);

            iDfsTemplate.Close();
            return(outputDfs);
        }
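A sketch of how this overload might be called (file names are hypothetical); the caller is responsible for writing data to, and closing, the returned file.

            var outputDfs = CreateFromTemplate(@"template.dfs1", @"result.dfs1");
            // ... write item time step data here ...
            outputDfs.Close();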
Example #6
        private void _OpenFiles()
        {
            if (!File.Exists(InputFile))
            {
                throw new Exception(String.Format("Input file {0} does not exist!", InputFile));
            }

            _inputDfs = DfsFileFactory.DfsGenericOpen(InputFile);
        }
Example #7
        public double GetWebTideStepsInMinutes()
        {
            IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(fi.FullName);

            // Time step of the equidistant calendar axis (in the axis time unit,
            // typically seconds), converted to minutes
            double WebTideStepsInMinutes = ((double)((IDfsEqCalendarAxis)dfs0File.FileInfo.TimeAxis).TimeStep / 60);

            dfs0File.Close();

            return(WebTideStepsInMinutes);
        }
Example #8
        public void Run(string inputfile, string outputfile)
        {
            if (!File.Exists(inputfile))
            {
                throw new Exception(String.Format("Input file {0} does not exist!", inputfile));
            }

            try
            {
                _dfsInput  = DfsFileFactory.DfsGenericOpen(inputfile);
                _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput, outputfile);

                ProcessAllTimeSteps(_dfsOutput);
            }
            finally
            {
                _dfsInput.Close();
                _dfsOutput.Close();
            }
        }
Example #9
        public void Run(string inputfile1, string inputfile2, double fac1, double fac2, string outputfile)
        {
            if (!File.Exists(inputfile1))
            {
                throw new Exception(String.Format("First input file {0} does not exist!", inputfile1));
            }
            if (!File.Exists(inputfile2))
            {
                throw new Exception(String.Format("Second input file {0} does not exist!", inputfile1));
            }

            var ext1 = Path.GetExtension(inputfile1).ToLower();
            var ext2 = Path.GetExtension(inputfile2).ToLower();

            if (ext1 != ext2)
            {
                throw new Exception("Input files must have same extension!");
            }
            var ext_out = Path.GetExtension(outputfile).ToLower();

            if (ext1 != ext_out)
            {
                throw new Exception("Input and output files must have same extension!");
            }

            try
            {
                _dfsInput1 = DfsFileFactory.DfsGenericOpen(inputfile1);
                _dfsInput2 = DfsFileFactory.DfsGenericOpen(inputfile2);
                _VerifyInputSimilarity(_dfsInput1, _dfsInput2);
                _dfsOutput = DfsOutput.CreateFromTemplate(_dfsInput1, outputfile);

                ProcessAllTimeSteps(_dfsOutput, (float)fac1, (float)fac2);
            }
            finally
            {
                _dfsInput1.Close();
                _dfsInput2.Close();
                _dfsOutput.Close();
            }
        }
Example #10
        /// <summary>
        /// Introductory example of how to load a dfs0 file.
        /// <para>
        /// The method assumes that the Rain_stepaccumulated.dfs0 test file
        /// is the input file.
        /// </para>
        /// </summary>
        /// <param name="filename">path and name of Rain_stepaccumulated.dfs0 test file</param>
        public static double ReadDfs0File(string filename)
        {
            // Open the file as a generic dfs file
            IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

            // Header information is contained in the IDfsFileInfo
            IDfsFileInfo fileInfo = dfs0File.FileInfo;
            int          steps    = fileInfo.TimeAxis.NumberOfTimeSteps; // 19

            // Information on each of the dynamic items, here the first one
            IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs0File.ItemInfo[0];
            string        nameOfFirstDynamicItem      = dynamicItemInfo.Name;     // "Rain"
            DfsSimpleType typeOfFirstDynamicItem      = dynamicItemInfo.DataType; // Double
            DataValueType valueType = dynamicItemInfo.ValueType;                  // StepAccumulated

            // Read data of the first item, third time step (item numbers start at 1, time step indices at 0):
            IDfsItemData datag  = dfs0File.ReadItemTimeStep(1, 2);
            double       value1 = System.Convert.ToDouble(datag.Data.GetValue(0)); // 0.36
            // Assuming this is a double value, the item data object can be converted to the correct type
            IDfsItemData <double> data = (IDfsItemData <double>)datag;
            double value2 = data.Data[0];                                  // 0.36

            // This iterates through all timesteps and items in the file
            // For performance reasons it is important to iterate over time steps
            // first and items second.
            double sum = 0;

            for (int i = 0; i < steps; i++)
            {
                for (int j = 1; j <= dfs0File.ItemInfo.Count; j++)
                {
                    data = (IDfsItemData <double>)dfs0File.ReadItemTimeStep(j, i);
                    double value = data.Data[0];
                    sum += value;
                }
            }

            dfs0File.Close();
            return(sum);
        }
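A minimal call sketch, assuming the Rain_stepaccumulated.dfs0 test file is available at the (hypothetical) path shown.

            double sum = ReadDfs0File(@"TestData\Rain_stepaccumulated.dfs0");
            Console.Out.WriteLine("Sum over all items and time steps: {0}", sum);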
Example #11
        /// <summary>
        /// Find maximum value and time of maximum for a specified item in dfs0 file
        /// </summary>
        /// <param name="filename">Path and name of file, e.g. data_ndr_roese.dfs0 test file</param>
        /// <param name="itemNumber">Item number to find maximum for</param>
        public static double FindMaxValue(string filename, int itemNumber)
        {
            // Open file, using stream class
            Stream   stream = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            IDfsFile file   = DfsFileFactory.DfsGenericOpen(stream);
            //IDfsFile file = DfsFileFactory.DfsGenericOpen(filename);

            // Extract Start date-time of file - assuming file is equidistant-calendar axis
            IDfsEqCalendarAxis timeAxis      = (IDfsEqCalendarAxis)file.FileInfo.TimeAxis;
            DateTime           startDateTime = timeAxis.StartDateTime;

            // Empty item data, reused when calling ReadItemTimeStep
            IDfsItemData <float> itemData = (IDfsItemData <float>)file.CreateEmptyItemData(itemNumber);

            // max value and time variables
            double   maxValue       = double.MinValue;
            double   maxTimeSeconds = -1;
            DateTime maxDateTime    = DateTime.MinValue;

            // Loop over all times in file
            for (int i = 0; i < file.FileInfo.TimeAxis.NumberOfTimeSteps; i++)
            {
                // Read time step for item, and extract value
                file.ReadItemTimeStep(itemData, i);
                double value = itemData.Data[0];
                // Check if value is larger than maxValue
                if (value > maxValue)
                {
                    maxValue       = value;
                    maxTimeSeconds = itemData.TimeInSeconds(timeAxis);
                    maxDateTime    = itemData.TimeAsDateTime(timeAxis);
                }
            }
            // Report results
            Console.Out.WriteLine("Max Value      : {0} {1}", maxValue, file.ItemInfo[itemNumber - 1].Quantity.UnitAbbreviation);
            Console.Out.WriteLine("Max Value time : {0}", maxDateTime.ToString("yyyy-MM-dd HH:mm:ss"));
            return(maxValue);
        }
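A usage sketch with a hypothetical path; item numbers start at 1, so this finds the maximum of the first dynamic item.

            double maxValue = FindMaxValue(@"TestData\data_ndr_roese.dfs0", 1);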
Example #12
        /// <summary>
        /// Introductory example of how to load a dfs0 file with a non-time axis
        /// as the primary axis. The important part here is to NOT call
        /// the <code>data.TimeInSeconds()</code>, because that will fail.
        /// </summary>
        /// <param name="filename">path and name of Added_Mass.dfs0 test file</param>
        public static double ReadNonTimeAxisDfs0(string filename)
        {
            // Open the file as a generic dfs file
            IDfsFile dfs0File = DfsFileFactory.DfsGenericOpen(filename);

            // Header information is contained in the IDfsFileInfo
            IDfsFileInfo fileInfo = dfs0File.FileInfo;
            // The TimeAxis is not a time axis, but a regular axis
            int          steps        = fileInfo.TimeAxis.NumberOfTimeSteps; // 256
            TimeAxisType timeAxisType = fileInfo.TimeAxis.TimeAxisType;      // TimeNonEquidistant
            eumUnit      timeUnit     = fileInfo.TimeAxis.TimeUnit;          // radian-per-second

            // Information on each of the dynamic items, here the first one
            IDfsSimpleDynamicItemInfo dynamicItemInfo = dfs0File.ItemInfo[0];
            string        nameOfFirstDynamicItem      = dynamicItemInfo.Name;     // "DOF_1-1"
            DfsSimpleType typeOfFirstDynamicItem      = dynamicItemInfo.DataType; // Float
            DataValueType valueType = dynamicItemInfo.ValueType;                  // Instantaneous

            // This iterates through all timesteps and items in the file
            // For performance reasons it is important to iterate over time steps
            // first and items second.
            double sum = 0;

            for (int i = 0; i < steps; i++)
            {
                for (int j = 1; j <= dfs0File.ItemInfo.Count; j++)
                {
                    var data = (IDfsItemData <float>)dfs0File.ReadItemTimeStep(j, i);
                    // The Time axis value is not a time value but in radian-per-second.
                    double axisValue = data.Time;
                    float  value     = data.Data[0];
                    sum += value;
                }
            }

            dfs0File.Close();
            return(sum);
        }
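A call sketch, assuming the Added_Mass.dfs0 test file mentioned in the summary is available at the (hypothetical) path shown.

            double total = ReadNonTimeAxisDfs0(@"TestData\Added_Mass.dfs0");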
Example #13
        private void btnLoadHD_Click(object sender, EventArgs e)
        {
            try
            {
                label2.Visible = true;
                OpenFileDialog dialog = new OpenFileDialog();
                dialog.Filter = "Mike HD Result Files|*.RES11";

                if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
                {
                    fileName = dialog.FileName;
                }

                // Run res11read.exe to export point/item information to a text file
                ProcessStartInfo start = new ProcessStartInfo();
                start.FileName        = @"C:\Program Files\DHI\2014\bin\res11read.exe";
                start.Arguments       = "-xy " + fileName + " " + fileName.Substring(0, fileName.Length - 6) + "_xy.txt";
                start.UseShellExecute = false;  // required for CreateNoWindow to take effect
                start.CreateNoWindow  = true;   // must be set before the process is started
                Process exeProcess    = Process.Start(start);
                exeProcess.WaitForExit();

                string[] riverChainageFile = File.ReadAllLines(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
                char[]   charSeparators    = new char[] { ' ' };

                StringBuilder sb = new StringBuilder();
                for (int i = 19; i < riverChainageFile.Length - 3; i++)
                {
                    var texts = riverChainageFile[i].Substring(24, 140).Split(charSeparators, StringSplitOptions.RemoveEmptyEntries);
                    if (texts[2] == "2")
                    {
                        QItems.Add("Q," + texts[0] + "," + texts[1]);
                    }
                    else if (texts[2] == "0" || texts[2] == "1")
                    {
                        WLItems.Add("WL," + texts[0] + "," + texts[1]);
                    }
                }
                for (int i = 0; i < WLItems.Count; i++)
                {
                    sb.AppendLine(WLItems[i]);
                    comboBox1.Items.Add(WLItems[i]);
                }

                for (int i = 0; i < QItems.Count; i++)
                {
                    sb.AppendLine(QItems[i]);
                    comboBox1.Items.Add(QItems[i]);
                }
                File.Delete(fileName.Substring(0, fileName.Length - 6) + "_xy.txt");
                File.WriteAllText(fileName.Substring(0, fileName.Length - 6) + "_xy.txt", sb.ToString());
                IDfsFile             resFile     = DfsFileFactory.DfsGenericOpen(fileName);
                DateTime[]           date        = resFile.FileInfo.TimeAxis.GetDateTimes();
                DateTime             startDate   = date[0];
                IDfsFileInfo         resfileInfo = resFile.FileInfo;
                IDfsItemData <float> data;
                noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;

                int cx = 0;
                for (int j = 0; j < resFile.ItemInfo.Count; j++)
                {
                    IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                    data = (IDfsItemData <float>)resFile.ReadItemTimeStep(j + 1, 0);
                    cx   = cx + dynamicItemInfo.ElementCount;
                }
                MessageBox.Show(cx.ToString());

                for (int i = 0; i < noTimeSteps; i++)
                {
                    dfsDate.Add(startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time));
                }

                for (int i = 0; i < noTimeSteps; i++)
                {
                    int counter   = 0;
                    int totalNode = 0;

                    for (int j = 0; j < resFile.ItemInfo.Count; j++)
                    {
                        IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                        data    = (IDfsItemData <float>)resFile.ReadItemTimeStep(j + 1, i);
                        counter = dynamicItemInfo.ElementCount;
                        for (int z = 0; z < counter; z++)
                        {
                            if (totalNode < comboBox1.Items.Count)
                            {
                                dfsData[i, totalNode] = (Convert.ToSingle(data.Data[z]));
                                totalNode             = totalNode + 1;
                            }
                            else
                            {
                                break;
                            }
                        }
                    }
                }
                var filepath = fileName.Split('\\');
                dfs0Path = filepath[0];
                for (int i = 1; i < filepath.Length - 1; i++)
                {
                    dfs0Path = dfs0Path + @"\" + filepath[i];
                }
                label2.Text = "Loaded successfully.";
            }
            catch (Exception error)
            {
                MessageBox.Show("File have not loaded. Error: " + error.Message);
            }
        }
Example #14
        /// <summary>
        /// Example of how to merge two or more dfs files. The merger is on dynamic item basis,
        /// i.e. add all dynamic items of a number of dfs files to a new dfs file.
        /// <para>
        /// It is assumed that all files have the same time stepping layout. It will merge
        /// as many time steps as contained in the file with the fewest time steps.
        /// </para>
        /// <para>
        /// If merging one of the specific types of dfs files, dfs0 or dfs1 or dfs2 or dfs3,
        /// the structure of the files must be identical, i.e. the sizes of the axes must be equal.
        /// Otherwise, the outcome will not be a valid dfs0/1/2/3 file.
        /// </para>
        /// </summary>
        /// <param name="targetFilename">Path and name of the new file to create</param>
        /// <param name="sourcesFilenames">Path and name of the source dfs files</param>
        public static void MergeDfsFileItems(string targetFilename, IList <string> sourcesFilenames)
        {
            // List of sources to be merged - in case of more than one, just extend this.
            List <IDfsFile> sources = new List <IDfsFile>();

            for (int i = 0; i < sourcesFilenames.Count; i++)
            {
                sources.Add(DfsFileFactory.DfsGenericOpen(sourcesFilenames[i]));
            }

            // Use the first file as skeleton for header and static items.
            IDfsFile     source   = sources[0];
            IDfsFileInfo fileInfo = source.FileInfo;

            DfsBuilder builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

            // Set up the header
            builder.SetDataType(fileInfo.DataType);
            builder.SetGeographicalProjection(fileInfo.Projection);
            builder.SetTemporalAxis(fileInfo.TimeAxis);
            builder.SetItemStatisticsType(fileInfo.StatsType);
            builder.DeleteValueByte        = fileInfo.DeleteValueByte;
            builder.DeleteValueDouble      = fileInfo.DeleteValueDouble;
            builder.DeleteValueFloat       = fileInfo.DeleteValueFloat;
            builder.DeleteValueInt         = fileInfo.DeleteValueInt;
            builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

            // Transfer compression keys - if any.
            if (fileInfo.IsFileCompressed)
            {
                int[] xkey;
                int[] ykey;
                int[] zkey;
                fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
                builder.SetEncodingKey(xkey, ykey, zkey);
            }

            // Copy custom blocks - if any
            foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
            {
                builder.AddCustomBlock(customBlock);
            }

            int minNumTimesteps = int.MaxValue;

            // Copy dynamic items for all source files
            for (int j = 0; j < sources.Count; j++)
            {
                if (sources[j].FileInfo.TimeAxis.NumberOfTimeSteps < minNumTimesteps)
                {
                    minNumTimesteps = sources[j].FileInfo.TimeAxis.NumberOfTimeSteps;
                }

                foreach (var itemInfo in sources[j].ItemInfo)
                {
                    builder.AddDynamicItem(itemInfo);
                }
            }

            // Create file
            builder.CreateFile(targetFilename);

            // Copy static items - add only from main file
            IDfsStaticItem sourceStaticItem;

            while (null != (sourceStaticItem = source.ReadStaticItemNext()))
            {
                builder.AddStaticItem(sourceStaticItem);
            }

            // Get the file
            DfsFile file = builder.GetFile();

            // Copy dynamic item data
            IDfsItemData sourceData;

            for (int i = 0; i < minNumTimesteps; i++)
            {
                for (int j = 0; j < sources.Count; j++)
                {
                    IDfsFile sourcej = sources[j];
                    // Copy all items for this source
                    for (int k = 0; k < sourcej.ItemInfo.Count; k++)
                    {
                        sourceData = sourcej.ReadItemTimeStepNext();
                        file.WriteItemTimeStepNext(sourceData.Time, sourceData.Data);
                    }
                }
            }

            foreach (IDfsFile sourcej in sources)
            {
                sourcej.Close();
            }
            file.Close();
        }
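A usage sketch with hypothetical file names; the first file in the list supplies the header and the static items of the merged file.

            MergeDfsFileItems(@"merged.dfs0",
                              new List <string> { @"waterlevel.dfs0", @"discharge.dfs0" });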
Example #15
        static void Main(string[] args)
        {
            try
            {
                StringBuilder  sb     = new StringBuilder();
                OpenFileDialog dialog = new OpenFileDialog();
                dialog.Filter = "Mike HD Result Files|*.RES11";

                if (dialog.ShowDialog() != System.Windows.Forms.DialogResult.Cancel)
                {
                    IDfsFile             resFile     = DfsFileFactory.DfsGenericOpen(dialog.FileName);
                    DateTime[]           date        = resFile.FileInfo.TimeAxis.GetDateTimes();
                    DateTime             startDate   = date[0];
                    IDfsFileInfo         resfileInfo = resFile.FileInfo;
                    IDfsItemData <float> data;

                    int          noTimeSteps = resfileInfo.TimeAxis.NumberOfTimeSteps;
                    DateTime[]   dfsDate     = new DateTime[noTimeSteps];
                    List <float> dfsWLData   = new List <float>();
                    List <float> dfsQData    = new List <float>();

                    for (int i = 0; i < noTimeSteps; i++)
                    {
                        dfsDate[i] = startDate.AddHours(resFile.ReadItemTimeStep(1, i).Time);
                    }

                    int totalWNode = 0;
                    int totalQNode = 0;
                    for (int i = 0; i < noTimeSteps; i++)
                    {
                        int Wcounter   = 0;
                        int nodeWCount = 0;

                        int Qcounter   = 0;
                        int nodeQCount = 0;
                        for (int j = 0; j < resFile.ItemInfo.Count; j++)
                        {
                            IDfsSimpleDynamicItemInfo dynamicItemInfo = resFile.ItemInfo[j];
                            string nameOftDynamicItem = dynamicItemInfo.Name;
                            string WLname             = nameOftDynamicItem.Substring(0, 11);
                            string Qname = nameOftDynamicItem.Substring(0, 9);
                            if (WLname == "Water Level")
                            {
                                Wcounter = dynamicItemInfo.ElementCount;
                                data     = (IDfsItemData <float>)resFile.ReadItemTimeStep(j + 1, i);
                                for (int z = 0; z < Wcounter; z++)
                                {
                                    dfsWLData.Add(Convert.ToSingle(data.Data[z]));
                                    nodeWCount = nodeWCount + 1;
                                }
                            }

                            else if (Qname == "Discharge")
                            {
                                Qcounter = dynamicItemInfo.ElementCount;
                                data     = (IDfsItemData <float>)resFile.ReadItemTimeStep(j + 1, i);
                                for (int z = 0; z < Qcounter; z++)
                                {
                                    dfsQData.Add(Convert.ToSingle(data.Data[z]));
                                    nodeQCount = nodeQCount + 1;
                                }
                            }
                        }
                        Console.WriteLine(i);
                        totalWNode = nodeWCount;
                        totalQNode = nodeQCount;
                    }
                    for (int i = 0; i < noTimeSteps; i++)
                    {
                        for (int j = 0; j < totalWNode; j++)
                        {
                            sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsWLData[i * totalWNode + j]);
                        }
                        File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_WL.csv", sb.ToString());
                        sb.Clear();
                    }

                    for (int i = 0; i < noTimeSteps; i++)
                    {
                        for (int j = 0; j < totalQNode; j++)
                        {
                            sb.AppendLine(dfsDate[i] + "," + (j + 1) + "," + dfsQData[i * totalQNode + j]);
                        }
                        File.AppendAllText(dialog.FileName.Substring(0, dialog.FileName.Length - 6) + "_Q.csv", sb.ToString());
                        sb.Clear();
                    }
                    Console.WriteLine("Result file processed suceesssfully.");
                    Console.WriteLine("Press any key to exit...");
                    Console.ReadKey();
                }
            }
            catch (Exception error)
            {
                Console.WriteLine("HD Model Result files cannot be processed due to an error. Error: " + error.Message);
                Console.WriteLine("Press any key to exit...");
                Console.ReadKey();
            }
        }
Example #16
        //private readonly List<IExchangeItem> _exchangeItems;
        //private readonly List<TimeSerie> _timeSeries;

        /// <summary>
        /// DFS0 reader. Gets information from the dfs file, and reads data.
        /// </summary>
        /// <param name="dfsfile">full path string to dfs0 file.</param>
        public Dfs0Reader(string dfsfile)
            : base(dfsfile)
        {
            // Set ObservationFile
            if (!File.Exists(dfsfile))
            {
                throw new FileNotFoundException("\n ERROR: DFS File Not Found! \n Could not find: " + dfsfile + " \n", dfsfile);
            }

            // Verify the file type - only dfs0 is supported
            string fileExtension = Path.GetExtension(dfsfile);

            if (System.String.Compare(fileExtension, ".dfs0", System.StringComparison.OrdinalIgnoreCase) != 0)
            {
                throw new Exception("\n ERROR: Observation File Type Incorrect! Expecting dfs0. \n  \n");
            }


            // Open the file as a generic dfs file
            _dfs0File = DfsFileFactory.DfsGenericOpen(dfsfile);

            // Header information is contained in the IDfsFileInfo
            IDfsFileInfo fileInfo = _dfs0File.FileInfo;

            // Check for dfs compliance
            CheckDFSCompliance();


            // Number of time steps (same for all items)
            _numTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;

            // Starting from...
            int _firstTimeStepIndex = fileInfo.TimeAxis.FirstTimeStepIndex;

            // Number of variable items in dfs0
            _numItems = _dfs0File.ItemInfo.Count;

            // Add the IDs to list (Keys)
            _itemIDs       = new List <string>();
            _quantities    = new List <string>();
            _xyLayerPoints = new List <IXYLayerPoint>();

            foreach (var itemInfo in _dfs0File.ItemInfo)
            {
                String name   = itemInfo.Name;
                var    coords = name.Split(',');


                double x      = Convert.ToDouble(coords[0]);
                double y      = Convert.ToDouble(coords[1]);
                int    zLayer = Convert.ToInt32(coords[2]);

                _quantities.Add(_dfs0File.FileInfo.FileTitle);
                _itemIDs.Add(name);
                _xyLayerPoints.Add(new XYLayerPoint(x, y, zLayer));
            }


            //Gather all times
            _times = _dfs0File.FileInfo.TimeAxis.GetDateTimes().ToList();
            _times = _timesteps;


            DateTime firstTime = _times[0];

            if (_dfs0File.FileInfo.TimeAxis.TimeAxisType != TimeAxisType.CalendarEquidistant)
            {
                // Handle pseudo-irregular files
                double[] dates = new double[_numTimeSteps]; //just make 1 bigger for easy indexing


                for (int iTimeStep = _firstTimeStepIndex; iTimeStep < _numTimeSteps; iTimeStep++)
                {
                    for (int iItem = 1; iItem < _numItems + 1; iItem++)
                    {
                        IDfsItemData data1      = _dfs0File.ReadItemTimeStep(iItem, iTimeStep);
                        double       offsetTime = data1.Time;

                        if (iItem == 1)
                        {
                            dates[iTimeStep] = offsetTime;
                        }
                        else
                        {
                            if (Math.Abs(offsetTime - dates[iTimeStep]) > 1.0)
                            {
                                throw new Exception("Non Equidistant Calander is not regular");
                            }
                        }
                    }
                    if (iTimeStep > 0)
                    {
                        _times[iTimeStep] = _times[0].AddSeconds(dates[iTimeStep]);
                    }
                }
            }

            IList <IDfsDynamicItemInfo> infoAllTimes = _dfs0File.ItemInfo;
            String TimeSeriesName = infoAllTimes[0].Name;

            // Delete values
            _deleteValueDouble = _dfs0File.FileInfo.DeleteValueDouble;
            _deleteValueFloat  = _dfs0File.FileInfo.DeleteValueFloat;
        }
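A construction sketch with a hypothetical path. Note that the constructor expects each item name to be a comma-separated "x,y,layer" triple, as parsed in the loop above.

            var reader = new Dfs0Reader(@"observations.dfs0");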
Example #17
        /// <summary>
        /// Read Time series data DFS0 to FEWS PI memory data structure
        /// </summary>
        /// <param name="pi">Memory data structure where content of the DFS0 file will be
        /// add</param>
        /// <param name="rootPath">Root directory where dfs0 files are placed</param>
        /// <param name="relativePath">Full file path relative to rootPath</param>
        /// <param name="ensembleId">Ensemble Id identifying where to put data from
        /// file</param>
        /// <param name="ensembleMemberId">Ensemble member Id identifying where to put data
        /// from file</param>
        /// <param name="ensembleMemberIndex">Ensemble member index</param>
        public bool ReadDfs0File(ref PI pi, string rootPath, string relativePath, string ensembleId, string ensembleMemberId, int ensembleMemberIndex)
        {
            var          dfs0File          = DfsFileFactory.DfsGenericOpen(Path.Combine(rootPath, relativePath));
            IDfsFileInfo fileInfo          = dfs0File.FileInfo;
            int          numberOfTimeSteps = fileInfo.TimeAxis.NumberOfTimeSteps;
            DateTime     start             = DateTime.MinValue;
            DateTime     end = DateTime.MinValue;

            if (fileInfo.TimeAxis.IsCalendar())
            {
                if (dfs0File.FileInfo.TimeAxis.IsEquidistant())
                {
                    start = (dfs0File.FileInfo.TimeAxis as IDfsEqCalendarAxis).StartDateTime;
                }
                else
                {
                    start = (dfs0File.FileInfo.TimeAxis as IDfsNonEqCalendarAxis).StartDateTime;
                }
            }
            for (int itemIndex = 0; itemIndex < dfs0File.ItemInfo.Count; itemIndex++)
            {
                var ts = new TimeSeries();
                switch (dfs0File.ItemInfo[itemIndex].ValueType)
                {
                case DataValueType.Instantaneous:
                    ts.Type = "instantaneous";
                    break;

                default:
                    ts.Type = "instantaneous";
                    break;
                }
                ts.X          = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateX;
                ts.Y          = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateY;
                ts.Z          = dfs0File.ItemInfo[itemIndex].ReferenceCoordinateZ;
                ts.LocationId = relativePath;
                if (!string.IsNullOrEmpty(ensembleId))
                {
                    ts.EnsembleId          = ensembleId;
                    ts.EnsembleMemberId    = ensembleMemberId;
                    ts.EnsembleMemberIndex = ensembleMemberIndex;
                }
                if (dfs0File.ItemInfo.Count > 1)
                {
                    ts.LocationId = $"{ts.LocationId}|{dfs0File.ItemInfo[itemIndex].Name}";
                }
                ts.TimeStep    = new TimeSpan(1, 0, 0);
                ts.StationName = ts.LocationId;
                ts.ParameterId = dfs0File.ItemInfo[itemIndex].Quantity.Item.ToString() + ";" + dfs0File.ItemInfo[itemIndex].Quantity.Unit.ToString();
                ts.MissVal     = -999999.9;
                ts.StartDate   = start;
                ts.EndDate     = start;
                ts.Units       = "";
                var      deleteVal = dfs0File.FileInfo.DeleteValueFloat;
                DateTime step1     = DateTime.MinValue;
                DateTime step2     = DateTime.MinValue;
                for (int timeStepIndex = 0; timeStepIndex < fileInfo.TimeAxis.NumberOfTimeSteps; timeStepIndex++)
                {
                    double value = ts.MissVal.Value;

                    var   values = dfs0File.ReadItemTimeStep(itemIndex + 1, timeStepIndex);
                    float fvalue = (float)(values.Data.GetValue(0));
                    if (Math.Abs(fvalue - deleteVal) > float.Epsilon)
                    {
                        value = fvalue;
                    }
                    var time = values.TimeAsDateTime(dfs0File.FileInfo.TimeAxis);

                    ts.Values.Add(time, new TSValue(value));
                    ts.EndDate = time;
                    if (step1 == DateTime.MinValue)
                    {
                        step1 = time;
                    }
                    else if (step2 == DateTime.MinValue)
                    {
                        ts.TimeStep = time - step1;
                        step2       = time;
                    }
                }
                pi.TimeSeries.Add(ts);
            }
            return(true);
        }
Example #18
        /// <summary>
        /// Example of how to copy a Dfs file.
        /// <para>
        /// This example is intended to show how to generically copy a file. In
        /// case a copy with modified data is required, this could be used as a base
        /// for the copy.
        /// </para>
        /// </summary>
        /// <param name="sourceFilename">Path and name of the source dfs file</param>
        /// <param name="filename">Path and name of the new file to create</param>
        public static void CopyDfsFile(string sourceFilename, string filename)
        {
            IDfsFile     source   = DfsFileFactory.DfsGenericOpen(sourceFilename);
            IDfsFileInfo fileInfo = source.FileInfo;

            DfsBuilder builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

            // Set up the header
            builder.SetDataType(fileInfo.DataType);
            builder.SetGeographicalProjection(fileInfo.Projection);
            builder.SetTemporalAxis(fileInfo.TimeAxis);
            builder.SetItemStatisticsType(fileInfo.StatsType);
            builder.DeleteValueByte        = fileInfo.DeleteValueByte;
            builder.DeleteValueDouble      = fileInfo.DeleteValueDouble;
            builder.DeleteValueFloat       = fileInfo.DeleteValueFloat;
            builder.DeleteValueInt         = fileInfo.DeleteValueInt;
            builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

            // Transfer compression keys - if any.
            if (fileInfo.IsFileCompressed)
            {
                int[] xkey;
                int[] ykey;
                int[] zkey;
                fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
                builder.SetEncodingKey(xkey, ykey, zkey);
            }

            // Copy custom blocks - if any
            foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
            {
                builder.AddCustomBlock(customBlock);
            }

            // Copy dynamic items
            foreach (var itemInfo in source.ItemInfo)
            {
                builder.AddDynamicItem(itemInfo);
            }

            // Create file
            builder.CreateFile(filename);

            // Copy static items
            IDfsStaticItem sourceStaticItem;

            while (null != (sourceStaticItem = source.ReadStaticItemNext()))
            {
                builder.AddStaticItem(sourceStaticItem);
            }

            // Get the file
            DfsFile file = builder.GetFile();

            // Copy dynamic item data
            IDfsItemData sourceData;

            while (null != (sourceData = source.ReadItemTimeStepNext()))
            {
                file.WriteItemTimeStepNext(sourceData.Time, sourceData.Data);
            }

            source.Close();
            file.Close();
        }
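A usage sketch with hypothetical file names; the copy is structurally identical to the source, including static items and custom blocks.

            CopyDfsFile(@"original.dfs2", @"copy.dfs2");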
Example #19
        /// <summary>
        /// Create a new file, being the difference of two files.
        /// <para>
        /// The two input files must be equal in structure, e.g. coming
        /// from the same simulation but giving different results.
        /// Header and static data must be identical; only the values
        /// of the dynamic data may differ.
        /// </para>
        /// </summary>
        public static void CreateDiffFile(string file1, string file2, string filediff = null)
        {
            IDfsFile dfs1 = DfsFileFactory.DfsGenericOpen(file1);
            IDfsFile dfs2 = DfsFileFactory.DfsGenericOpen(file2);

            // Validate that it has the same number of items.
            if (dfs1.ItemInfo.Count != dfs2.ItemInfo.Count)
            {
                throw new Exception("Number of dynamic items does not match");
            }
            int numItems = dfs1.ItemInfo.Count;

            // In case number of time steps does not match, take the smallest.
            int numTimes = dfs1.FileInfo.TimeAxis.NumberOfTimeSteps;

            if (numTimes > dfs2.FileInfo.TimeAxis.NumberOfTimeSteps)
            {
                numTimes = dfs2.FileInfo.TimeAxis.NumberOfTimeSteps;
                Console.Out.WriteLine("Number of time steps does not match, using the smallest number");
            }

            // For recording max difference for every item
            double[] maxDiff = new double[dfs1.ItemInfo.Count];
            // Time step index of the maximum and of the first difference; -1 if no difference
            int[] maxDiffTime   = new int[dfs1.ItemInfo.Count];
            int[] firstDiffTime = new int[dfs1.ItemInfo.Count];
            for (int i = 0; i < dfs1.ItemInfo.Count; i++)
            {
                maxDiffTime[i]   = -1;
                firstDiffTime[i] = -1;
            }

            // Copy over info from the first file, assuming the second file contains the same data.
            IDfsFileInfo fileInfo = dfs1.FileInfo;

            DfsBuilder builder = null;

            if (!string.IsNullOrEmpty(filediff))
            {
                builder = DfsBuilder.Create(fileInfo.FileTitle, fileInfo.ApplicationTitle, fileInfo.ApplicationVersion);

                // Set up the header
                builder.SetDataType(fileInfo.DataType);
                builder.SetGeographicalProjection(fileInfo.Projection);
                builder.SetTemporalAxis(fileInfo.TimeAxis);
                builder.SetItemStatisticsType(fileInfo.StatsType);
                builder.DeleteValueByte        = fileInfo.DeleteValueByte;
                builder.DeleteValueDouble      = fileInfo.DeleteValueDouble;
                builder.DeleteValueFloat       = fileInfo.DeleteValueFloat;
                builder.DeleteValueInt         = fileInfo.DeleteValueInt;
                builder.DeleteValueUnsignedInt = fileInfo.DeleteValueUnsignedInt;

                // Transfer compression keys.
                if (fileInfo.IsFileCompressed)
                {
                    int[] xkey;
                    int[] ykey;
                    int[] zkey;
                    fileInfo.GetEncodeKey(out xkey, out ykey, out zkey);
                    builder.SetEncodingKey(xkey, ykey, zkey);
                }

                // Copy custom blocks
                foreach (IDfsCustomBlock customBlock in fileInfo.CustomBlocks)
                {
                    builder.AddCustomBlock(customBlock);
                }
            }

            // Copy dynamic item definitions
            bool[] floatItems = new bool[dfs1.ItemInfo.Count];
            for (int i = 0; i < dfs1.ItemInfo.Count; i++)
            {
                var itemInfo = dfs1.ItemInfo[i];

                // Validate item sizes
                var itemInfo2 = dfs2.ItemInfo[i];
                if (itemInfo.ElementCount != itemInfo2.ElementCount)
                {
                    throw new Exception("Dynamic items must have same size, item number " + (i + 1) +
                                        " has different sizes in the two files");
                }
                // Validate the data type, only supporting floats and doubles.
                if (itemInfo.DataType == DfsSimpleType.Float)
                {
                    floatItems[i] = true;
                }
                else if (itemInfo.DataType != DfsSimpleType.Double)
                {
                    throw new Exception("Dynamic item must be double or float, item number " + (i + 1) + " is of type " +
                                        (itemInfo.DataType));
                }

                builder?.AddDynamicItem(itemInfo);
            }

            // Create file
            builder?.CreateFile(filediff);

            if (builder != null)
            {
                // Copy over static items from file 1, assuming the static items of file 2 are identical
                IDfsStaticItem si1;
                while (null != (si1 = dfs1.ReadStaticItemNext()))
                {
                    builder.AddStaticItem(si1);
                }
            }

            // Get the file
            DfsFile diff = builder?.GetFile();

            // Write dynamic data to the file, being the difference between the two
            for (int i = 0; i < numTimes; i++)
            {
                for (int j = 0; j < numItems; j++)
                {
                    if (floatItems[j])
                    {
                        IDfsItemData <float> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData <float>;
                        IDfsItemData <float> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData <float>;
                        for (int k = 0; k < data1.Data.Length; k++)
                        {
                            float valuediff = data1.Data[k] - data2.Data[k];
                            data1.Data[k] = valuediff;
                            float absValueDiff = Math.Abs(valuediff);
                            if (absValueDiff > maxDiff[j])
                            {
                                maxDiff[j]     = absValueDiff;
                                maxDiffTime[j] = i;
                                if (firstDiffTime[j] == -1)
                                {
                                    firstDiffTime[j] = i;
                                }
                            }
                        }
                        diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
                    }
                    else
                    {
                        IDfsItemData <double> data1 = dfs1.ReadItemTimeStepNext() as IDfsItemData <double>;
                        IDfsItemData <double> data2 = dfs2.ReadItemTimeStepNext() as IDfsItemData <double>;
                        for (int k = 0; k < data1.Data.Length; k++)
                        {
                            double valuediff = data1.Data[k] - data2.Data[k];
                            data1.Data[k] = valuediff;
                            double absValueDiff = Math.Abs(valuediff);
                            if (absValueDiff > maxDiff[j])
                            {
                                maxDiff[j]     = absValueDiff;
                                maxDiffTime[j] = i;
                                if (firstDiffTime[j] == -1)
                                {
                                    firstDiffTime[j] = i;
                                }
                            }
                        }
                        diff?.WriteItemTimeStepNext(data1.Time, data1.Data);
                    }
                }
            }

            System.Console.WriteLine("Difference statistics:");
            for (int i = 0; i < maxDiffTime.Length; i++)
            {
                if (maxDiffTime[i] < 0)
                {
                    Console.WriteLine("{0,-30}: no difference", dfs1.ItemInfo[i].Name);
                }
                else
                {
                    Console.WriteLine("{0,-30}: Max difference at timestep {1,3}: {2}. First difference at timestep {3}", dfs1.ItemInfo[i].Name, maxDiffTime[i], maxDiff[i], firstDiffTime[i]);
                }
            }

            dfs1.Close();
            dfs2.Close();
            diff?.Close();
        }
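A usage sketch with hypothetical file names. Passing the third argument writes a difference file; omitting it only prints the difference statistics to the console.

            CreateDiffFile(@"run_a.dfs2", @"run_b.dfs2", @"run_diff.dfs2");
            CreateDiffFile(@"run_a.dfs2", @"run_b.dfs2");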
Example #20
        public bool GenerateWebTideNode(TVFileModel TVFileModelBC, int WebTideNodeNumb, List <Coord> CoordList, List <TVFileModel> tvFileModelList, int BoundaryConditionCodeNumber, List <List <WaterLevelResult> > AllWLResults, List <IEnumerable <CurrentResult> > AllCurrentResults)
        {
            List <eumItem> eumItemList = new List <eumItem>();

            DfsFactory factory = new DfsFactory();

            IDfsFile dfsOldFile = DfsFileFactory.DfsGenericOpen(TVFileModelBC.ServerFilePath + TVFileModelBC.ServerFileName);

            DfsBuilder dfsNewFile = DfsBuilder.Create(dfsOldFile.FileInfo.FileTitle, dfsOldFile.FileInfo.ApplicationTitle, dfsOldFile.FileInfo.ApplicationVersion);

            double WebTideStepsInMinutes = ((double)((IDfsEqCalendarAxis)((dfsOldFile.FileInfo).TimeAxis)).TimeStep / 60);

            DateTime? dateTimeTemp      = null;
            int?      NumberOfTimeSteps = null;
            int?      TimeStepInterval  = null;

            using (PFS pfs = new PFS(base.fi))
            {
                dateTimeTemp = pfs.GetVariableDateTime("FemEngineHD/TIME", "start_time");
                if (dateTimeTemp == null)
                {
                    dfsOldFile.Close();
                    return(false);
                }

                NumberOfTimeSteps = pfs.GetVariable <int>("FemEngineHD/TIME", "number_of_time_steps", 1);
                if (NumberOfTimeSteps == null)
                {
                    dfsOldFile.Close();
                    return(false);
                }

                TimeStepInterval = pfs.GetVariable <int>("FemEngineHD/TIME", "time_step_interval", 1);
                if (TimeStepInterval == null)
                {
                    dfsOldFile.Close();
                    return(false);
                }
            }

            DateTime StartDate = ((DateTime)dateTimeTemp).AddHours(-1);
            DateTime EndDate   = ((DateTime)dateTimeTemp).AddSeconds((int)NumberOfTimeSteps * (int)TimeStepInterval).AddHours(1);

            dfsNewFile.SetDataType(dfsOldFile.FileInfo.DataType);
            dfsNewFile.SetGeographicalProjection(dfsOldFile.FileInfo.Projection);
            dfsNewFile.SetTemporalAxis(factory.CreateTemporalEqCalendarAxis(eumUnit.eumUsec, StartDate, 0, WebTideStepsInMinutes * 60));
            dfsNewFile.SetItemStatisticsType(StatType.RegularStat);

            foreach (IDfsDynamicItemInfo di in dfsOldFile.ItemInfo)
            {
                DfsDynamicItemBuilder ddib = dfsNewFile.CreateDynamicItemBuilder();
                ddib.Set(di.Name, eumQuantity.Create(di.Quantity.Item, di.Quantity.Unit), di.DataType);
                ddib.SetValueType(di.ValueType);
                ddib.SetAxis(factory.CreateAxisEqD1(eumUnit.eumUsec, CoordList.Count, 0, 1));
                ddib.SetReferenceCoordinates(di.ReferenceCoordinateX, di.ReferenceCoordinateY, di.ReferenceCoordinateZ);
                dfsNewFile.AddDynamicItem(ddib.GetDynamicItemInfo());
                eumItemList.Add(di.Quantity.Item);
            }

            dfsOldFile.Close();

            string[]      NewFileErrors = dfsNewFile.Validate();
            StringBuilder sbErr         = new StringBuilder();

            foreach (string s in NewFileErrors)
            {
                sbErr.AppendLine(s);
            }

            if (NewFileErrors.Count() > 0)
            {
                ErrorMessage = string.Format(CSSPDHIRes.CouldNotCreate_, TVFileModelBC.ServerFileName.Replace(".dfs0", "dfs1"));
                OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
                return(false);
            }

            string NewFileNameBC = TVFileModelBC.ServerFileName;

            if (CoordList.Count == 0)
            {
                ErrorMessage = CSSPDHIRes.NumberOfWebTideNodesIsZero;
                OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
                return(false);
            }

            if (eumItemList.Count == 1)
            {
                if (eumItemList[0] == eumItem.eumIWaterLevel || eumItemList[0] == eumItem.eumIWaterDepth)
                {
                    // Assumption: the first node's series drives the number of time steps to write
                    List <WaterLevelResult> WLResults = AllWLResults[0];

                    dfsNewFile.CreateFile(TVFileModelBC.ServerFilePath + NewFileNameBC);
                    IDfsFile file = dfsNewFile.GetFile();
                    for (int i = 0; i < WLResults.ToList().Count; i++)
                    {
                        float[] floatArray = new float[AllWLResults.Count];

                        for (int j = 0; j < AllWLResults.Count; j++)
                        {
                            floatArray[j] = ((float)((List <WaterLevelResult>)AllWLResults[j].ToList())[i].WaterLevel);
                        }



                        file.WriteItemTimeStepNext(0, floatArray);  // water level array
                    }
                    file.Close();
                }
                else
                {
                    ErrorMessage = string.Format(CSSPDHIRes.FileContainsOneParamButItsNotOfTypeWLOrWDItIs_, eumItemList[0].ToString());
                    OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
                    return(false);
                }
            }
            else if (eumItemList.Count == 2)
            {
                if (eumItemList[0] == eumItem.eumIuVelocity && eumItemList[1] == eumItem.eumIvVelocity)
                {
                    // read web tide for the required time
                    // Assumption: the first node's series drives the number of time steps to write
                    List <CurrentResult> CurrentResults = AllCurrentResults[0].ToList();

                    dfsNewFile.CreateFile(TVFileModelBC.ServerFilePath + NewFileNameBC);
                    IDfsFile file = dfsNewFile.GetFile();
                    for (int i = 0; i < CurrentResults.ToList().Count; i++)
                    {
                        float[] floatArrayX = new float[AllCurrentResults.Count];
                        float[] floatArrayY = new float[AllCurrentResults.Count];

                        for (int j = 0; j < AllCurrentResults.Count; j++)
                        {
                            floatArrayX[j] = ((float)((List <CurrentResult>)AllCurrentResults[j].ToList())[i].x_velocity);
                            floatArrayY[j] = ((float)((List <CurrentResult>)AllCurrentResults[j].ToList())[i].y_velocity);
                        }

                        file.WriteItemTimeStepNext(0, floatArrayX);  // Current xVelocity
                        file.WriteItemTimeStepNext(0, floatArrayY);  // Current yVelocity
                    }
                    file.Close();
                }
                else
                {
                    ErrorMessage = string.Format(CSSPDHIRes.FileContains2ParamButItsNotOfUVAndVVItIs_And_, eumItemList[0].ToString(), eumItemList[1].ToString());
                    OnCSSPDHIChanged(new CSSPDHIEventArgs(new CSSPDHIMessage("Error", -1, false, ErrorMessage)));
                    return(false);
                }
            }
            else
            {
                // this is not a file that is used for Water Level or Currents
            }

            return(false);
        }