public void WriteTest()
{
    DFS2 outdata = new DFS2(@"..\..\..\TestData\simpelmatrixKopi.dfs2");
    DenseMatrix M = outdata.GetData(0, 1);
    DenseMatrix M2;
    M[2, 2] = 2000;

    for (int i = 0; i < 10; i++)
    {
        outdata.SetData(i + 8, 1, M);
        M2 = outdata.GetData(i + 8, 1);
        Assert.IsTrue(M.Equals(M2));
    }

    DateTime d = new DateTime(1950, 1, 1);
    string dd = d.ToShortDateString();
    outdata.TimeOfFirstTimestep = new DateTime(1950, 1, 1);
    outdata.TimeStep = new TimeSpan(20, 0, 0, 0);
    outdata.Dispose();
}
public void GetDataTest1()
{
    DenseMatrix M = _simpleDfs.GetData(0, 1);
    Assert.AreEqual(0, M[0, 0]);
    Assert.AreEqual(1, M[1, 0]);
    Assert.AreEqual(2, M[2, 0]);
    Assert.AreEqual(3, M[0, 1]);
    Assert.AreEqual(10, _simpleDfs.GetData(323, 125, 0, 1), 1e-5);
}
public void MartinData()
{
    Program_Accessor.Main(new string[] { @"..\..\..\Testdata\GWL_diff.xml" });

    DFS2 outp = new DFS2(@"..\..\..\Testdata\test_Diff_GWL.dfs2");
    Assert.AreEqual(13, outp.GetData(0, 1)[234, 160]);
    Assert.AreEqual(13, outp.GetData(1, 1)[234, 160]);
    Assert.AreEqual(14, outp.GetData(2, 1)[234, 160]);
    Assert.AreEqual(42.4304, outp.GetData(4, 1)[231, 160], 0.001);
    outp.Dispose();
}
private void DistributeFromDFS2()
{
    ReClaimIntakes();
    List<IIntake> IntakesToKeep = new List<IIntake>();

    // Keep the intakes whose well lies in an active grid cell.
    // Iterate over a copy because intakes are removed from the collection inside the loop.
    foreach (IIntake I in Intakes.ToList())
    {
        int col = _gridCodes.GetColumnIndex(I.well.X);
        int row = _gridCodes.GetRowIndex(I.well.Y); // the row index is found from the Y-coordinate
        if (col >= 0 && row >= 0)
        {
            if (_gridCodes.GetData(0, 1)[row, col] != _gridCodes.DeleteValue)
            {
                IntakesToKeep.Add(I);
                Intakes.Remove(I);
            }
        }
    }
    MoveIntakes();
    foreach (IIntake I in IntakesToKeep)
        Intakes.Add(I);
}
/// <summary>
/// Performs a simple mathematical operation on two items from .dfs2 files
/// </summary>
/// <param name="OperationData"></param>
public static void GridMath(XElement OperationData)
{
    DFS3.MaxEntriesInBuffer = 1;
    DFS2.MaxEntriesInBuffer = 1;

    // Read input files, item numbers, operator and output file name from the xml element
    string File1 = OperationData.Element("DFS2FileName1").Value;
    int Item1 = int.Parse(OperationData.Element("Item1").Value);
    string Operator = OperationData.Element("MathOperation").Value;
    string File2 = OperationData.Element("DFS2FileName2").Value;
    int Item2 = int.Parse(OperationData.Element("Item2").Value);
    string DFS2OutPut = OperationData.Element("DFS2OutputFileName").Value;

    DFS2 dfsFile1 = new DFS2(File1);
    DFS2 dfsFile2 = new DFS2(File2);

    DFS2 outputFile = new DFS2(DFS2OutPut, 1);
    outputFile.CopyFromTemplate(dfsFile1);
    outputFile.FirstItem.Name = dfsFile1.Items[Item1 - 1].Name + " " + Operator + " " + dfsFile2.Items[Item2 - 1].Name;
    outputFile.FirstItem.EumItem = dfsFile1.Items[Item1 - 1].EumItem;
    outputFile.FirstItem.EumUnit = dfsFile1.Items[Item1 - 1].EumUnit;

    // Apply the operator time step by time step
    for (int i = 0; i < dfsFile1.NumberOfTimeSteps; i++)
    {
        DenseMatrix M1 = dfsFile1.GetData(i, Item1);
        DenseMatrix M2 = dfsFile2.GetData(i, Item2);
        DenseMatrix M3 = new DenseMatrix(M1.RowCount, M1.ColumnCount);

        switch (Operator)
        {
            case "+":
                M3 = M1 + M2;
                break;
            case "-":
                M3 = M1 - M2;
                break;
            case "*":
                M1.PointwiseMultiply(M2, M3);
                break;
            case "/":
                M1.PointwiseDivide(M2, M3);
                break;
        }
        RecreateDeleteValues(M1, M3, dfsFile1.DeleteValue);
        outputFile.SetData(i, 1, M3);
    }
    dfsFile1.Dispose();
    dfsFile2.Dispose();
    outputFile.Dispose();
}
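// A minimal usage sketch (not from the source) showing the OperationData element that GridMath
// expects, built with the element names the method actually reads. The root element name,
// file names and item numbers are placeholders, and GridMath is assumed to be in scope.
XElement op = new XElement("GridMath",
    new XElement("DFS2FileName1", @"file1.dfs2"),
    new XElement("Item1", "1"),
    new XElement("MathOperation", "-"),
    new XElement("DFS2FileName2", @"file2.dfs2"),
    new XElement("Item2", "1"),
    new XElement("DFS2OutputFileName", @"difference.dfs2"));
GridMath(op);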
/// <summary>
/// Small program that extracts a time series from a time variant dfs2.
/// </summary>
/// <param name="args"></param>
static void Main(string[] args)
{
    TimespanSeries PrecipTS = new TimespanSeries();
    PrecipTS.Name = "Precipitation";
    PrecipTS.Description = "Precipitation extracted from dfs2";
    PrecipTS.Unit = UnitFactory.Instance.GetUnit(NamedUnits.millimeterperday);

    //Open the DFS2-file
    DFS2 precip = new DFS2(@"C:\Users\Jacob\Projekter\MikeSheWrapperForGEUS\novomr6\2-layer-filer\Standard_korrigeret_Prec_DK_10km_1990-2008.dfs2");

    //UTM-coordinates for Gjeller sø
    double XUTM = 456198;
    double YUTM = 6272321;

    //Get column and row index from UTM-coordinates
    int col = precip.GetColumnIndex(XUTM);
    int row = precip.GetRowIndex(YUTM);

    //Loop all the time steps
    for (int i = 0; i < precip.NumberOfTimeSteps; i++)
    {
        //Extract the value
        var val = precip.GetData(i, 1)[row, col];
        //Insert into timeseries
        PrecipTS.AddValue(precip.TimeSteps[i].Subtract(TimeSpan.FromDays(1)), precip.TimeSteps[i], val);
    }
    precip.Dispose();

    //Now do the same for evaporation. The DFS2-file may have another grid and other time steps
    DFS2 evap = new DFS2(@"C:\Users\Jacob\Projekter\MikeSheWrapperForGEUS\novomr6\2-layer-filer\Novana_DK_EPmak_40km_1990-1998_20km_1999-2008_ed.dfs2");
    TimespanSeries EvapTS = new TimespanSeries();
    EvapTS.Name = "Evaporation";
    EvapTS.Description = "Evaporation extracted from dfs2";
    EvapTS.Unit = UnitFactory.Instance.GetUnit(NamedUnits.millimeterperday);

    //Get column and row index from UTM-coordinates
    col = evap.GetColumnIndex(XUTM);
    row = evap.GetRowIndex(YUTM);

    for (int i = 0; i < evap.NumberOfTimeSteps; i++)
    {
        //Extract the value
        var val = evap.GetData(i, 1)[row, col];
        //Insert into timeseries
        EvapTS.AddValue(evap.TimeSteps[i].Subtract(TimeSpan.FromDays(1)), evap.TimeSteps[i], val);
    }
    evap.Dispose();

    //Put all time series into a group and save
    TimeSeriesGroup tsgroup = new TimeSeriesGroup();
    tsgroup.Items.Add(PrecipTS);
    tsgroup.Items.Add(EvapTS);
    tsgroup.Save(@"C:\Users\Jacob\Projekter\GWSW-Interaction\Gjeller Sø\climate.xts");
}
public void CreateFile()
{
    DFS2 df = new DFS2(@"..\..\..\TestData\test.dfs2", 1);
    df.NumberOfColumns = 5;
    df.NumberOfRows = 7;
    df.XOrigin = 9000;
    df.YOrigin = 6000;
    df.Orientation = 1;
    df.GridSize = 15;
    df.TimeOfFirstTimestep = new DateTime(2011, 10, 1, 23, 0, 0);
    var s = df.TimeOfFirstTimestep.ToString("yyyy-MM-dd");
    df.TimeStep = TimeSpan.FromHours(2);

    df.FirstItem.Name = "SGS Kriged dyn. corr.precip";
    df.FirstItem.EumItem = eumItem.eumIPrecipitationRate;
    df.FirstItem.EumUnit = eumUnit.eumUmillimeterPerDay;

    DenseMatrix m = new DenseMatrix(df.NumberOfRows, df.NumberOfColumns);
    m[3, 4] = 25;
    df.SetData(0, 1, m);
    df.SetData(1, 1, m);
    m[3, 4] = 15;
    df.SetData(2, 1, m);
    df.Dispose();

    df = new DFS2(@"..\..\..\TestData\test.dfs2");
    Assert.AreEqual(eumItem.eumIPrecipitationRate, df.FirstItem.EumItem);
    Assert.AreEqual(eumUnit.eumUmillimeterPerDay, df.FirstItem.EumUnit);
    DenseMatrix m2 = df.GetData(1, 1);
    Assert.AreEqual(25, m2[3, 4]);

    DFS2 df2 = new DFS2(@"..\..\..\TestData\test2.dfs2", df);
    df2.SetData(0, 1, m);
    Assert.AreEqual(eumItem.eumIPrecipitationRate, df2.FirstItem.EumItem);
    Assert.AreEqual(eumUnit.eumUmillimeterPerDay, df2.FirstItem.EumUnit);
    Assert.AreEqual(df.GridSize, df2.GridSize);
    Assert.AreEqual(df.NumberOfColumns, df2.NumberOfColumns);
    Assert.AreEqual(df.NumberOfRows, df2.NumberOfRows);
    Assert.AreEqual(df.Orientation, df2.Orientation);
    Assert.AreEqual(df.TimeOfFirstTimestep, df2.TimeOfFirstTimestep);
    Assert.AreEqual(df.TimeStep, df2.TimeStep);
    Assert.AreEqual(df.XOrigin, df2.XOrigin);
    Assert.AreEqual(df.YOrigin, df2.YOrigin);
    Assert.AreEqual(df.FirstItem.Name, df2.FirstItem.Name);
    Assert.AreEqual(df.Items.Count(), df2.Items.Count());
    Assert.AreEqual(df.DeleteValue, df2.DeleteValue);
}
public void GridEdit()
{
    DFS2 dfs = new DFS2(@"C:\Jacob\OpenDA.DotNet_vs2008\mshe\Mshe_5x5\Initial Potential Head.dfs2");
    var m = dfs.GetData(0, 1);
    for (int i = 0; i < m.Data.Count(); i++)
        m.Data[i] = -0.01 * i;
    dfs.SetData(0, 1, m);
    dfs.Dispose();
}
public static string GetASCIIGrid(this DFS2 dfsfile, int TimeStep, int Item)
{
    ASCIIGrid ascii = new ASCIIGrid();
    ascii.NumberOfColumns = dfsfile.NumberOfColumns;
    ascii.NumberOfRows = dfsfile.NumberOfRows;
    ascii.XOrigin = dfsfile.XOrigin;
    ascii.YOrigin = dfsfile.YOrigin;
    ascii.GridSize = dfsfile.GridSize;
    ascii.DeleteValue = dfsfile.DeleteValue;
    ascii.Data = dfsfile.GetData(TimeStep, Item);
    return ascii.ToString();
}
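// A minimal usage sketch (assumed, not from the source): write the first time step of item 1
// as ASCII grid text. File paths are placeholders and the extension class is assumed in scope.
DFS2 dfs = new DFS2(@"input.dfs2");
System.IO.File.WriteAllText(@"input.asc", dfs.GetASCIIGrid(0, 1));
dfs.Dispose();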
public void Initialize()
{
    dfs = new DFS2(FileName.FileName);

    // If a column name is given, use the matching item; otherwise default to the first item
    int itemnumber = 1;
    if (FileName.ColumnNames.Count > 0)
    {
        var dfsitem = dfs.Items.FirstOrDefault(I => I.Name == FileName.ColumnNames[0]);
        if (dfsitem != null)
            itemnumber = dfsitem.ItemNumber;
    }
    Data = dfs.GetData(0, itemnumber);
}
public void CreateCompressedFromTemplate()
{
    DFS2 dfsorg = new DFS2(@"..\..\..\TestData\Novomr1_inv_PreProcessed.DFS2");
    DFS2 dfs = new DFS2(@"..\..\..\TestData\Novomr1_inv_PreProcessed_rewritten.DFS2", dfsorg);

    for (int i = 1; i <= dfsorg.NumberOfItems; i++)
        dfs.SetData(0, i, dfsorg.GetData(0, i));

    dfsorg.Dispose();
    dfs.Dispose();

    var f1 = new System.IO.FileInfo(@"..\..\..\TestData\Novomr1_inv_PreProcessed.DFS2");
    var f2 = new System.IO.FileInfo(@"..\..\..\TestData\Novomr1_inv_PreProcessed_rewritten.DFS2");
    Assert.AreEqual(f1.Length, f2.Length, 50);
}
/// <summary>
/// Tries to find the height at the point using the DEM source selected with the enum
/// </summary>
/// <param name="point"></param>
/// <param name="height"></param>
/// <returns></returns>
public bool TryFindDemHeight(IXYPoint point, out double? height)
{
    height = null;
    switch (DEMSource)
    {
        case SourceType.Oracle:
            return Oracle.TryGetHeight(point, out height);
        case SourceType.KMSWeb:
            return KMSData.TryGetHeight(point, 32, out height);
        case SourceType.DFS2:
            int col = DFSdem.GetColumnIndex(point.X);
            int row = DFSdem.GetRowIndex(point.Y);
            if (col >= 0 && row >= 0)
            {
                height = DFSdem.GetData(0, 1)[row, col];
                return true;
            }
            return false;
        case SourceType.HydroInform:
        {
            XYPoint p = point as XYPoint;
            if (LDC.State == System.ServiceModel.CommunicationState.Faulted)
                return false;
            var d = LDC.GetHeight(p.Latitude, p.Longitude);
            if (d.HasValue)
                height = d.Value;
            return d.HasValue;
        }
        default:
            return false;
    }
}
public void OpenTwiceTest()
{
    DFS2 dfs = new DFS2(@"..\..\..\TestData\Novomr1_inv_PreProcessed.DFS2");

    List<DFS2> _files = new List<DFS2>();
    for (int i = 0; i < 100; i++)
    {
        _files.Add(new DFS2(@"..\..\..\TestData\Novomr1_inv_PreProcessed.DFS2"));
        Matrix M = _files[i].GetData(0, 1);
    }

    int k = 0;
    DFS2.MaxEntriesInBuffer = 5;
    for (int i = 1; i < dfs.Items.Count(); i++)
    {
        Matrix M = dfs.GetData(0, i);
    }
}
static void Main(string[] args)
{
    DFS2 df = new DFS2(args[0]);
    DFS2 dfnew = new DFS2(Path.ChangeExtension(args[0], "") + "_deletes.dfs2", df);

    // The threshold defaults to 1 and can be overridden by an optional second argument
    double threshold = 1;
    if (args.Count() > 1)
        double.TryParse(args[1], out threshold);

    // Cap all values above the threshold in every item and time step
    for (int i = 0; i < df.TimeSteps.Count; i++)
    {
        for (int j = 1; j <= df.Items.Count(); j++)
        {
            var M = df.GetData(i, j);
            for (int m = 0; m < df.NumberOfColumns; m++)
                for (int n = 0; n < df.NumberOfRows; n++)
                    if (M[n, m] > threshold)
                        M[n, m] = threshold;
            dfnew.SetData(i, j, M);
        }
    }
    dfnew.Dispose();
    df.Dispose();
}
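// Example invocation (assumed, not from the source; the executable name is a placeholder):
//   CapValues.exe input.dfs2 0.5
// reads input.dfs2, caps every value above 0.5 and writes a copy next to the input file
// with the suffix "_deletes.dfs2".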
static void Main(string[] args)
{
    string dataFile = args[0];
    string gridFile = args[1];
    string outfilename = Path.Combine(Path.GetDirectoryName(dataFile), "WaterOnTerrain.txt");
    string dfsoutname = Path.Combine(Path.GetDirectoryName(dataFile), "WaterOnTerrain.dfs2");

    // Cells are only counted when the data value exceeds this level; optional third argument
    double LowerLevel = 0;
    if (args.Count() > 2)
        double.TryParse(args[2], out LowerLevel);

    DFS2 Data = new DFS2(dataFile);
    DFS2 Grid = new DFS2(gridFile);
    DFS2 dfsout = new DFS2(dfsoutname, Grid);
    DenseMatrix dmout = new DenseMatrix(Grid.NumberOfRows, Grid.NumberOfColumns);

    //Read the grid and fill into a matrix
    DenseMatrix dmg = Grid.GetData(0, 1);

    //Collect the distinct grid codes
    Dictionary<double, int> GridAreas = new Dictionary<double, int>();
    for (int i = 0; i < dmg.RowCount; i++)
        for (int j = 0; j < dmg.ColumnCount; j++)
            if (!GridAreas.ContainsKey(dmg[i, j]))
                GridAreas.Add(dmg[i, j], 0);

    List<double> keys = new List<double>(GridAreas.Keys);

    using (StreamWriter sw = new StreamWriter(outfilename))
    {
        //Build header
        string Line = "Time";
        foreach (var key in GridAreas.Keys)
            Line += "\tGridCode\tNumberOfCells\tArea";
        sw.WriteLine(Line);

        //Time loop
        for (int t = 0; t < Data.NumberOfTimeSteps; t++)
        {
            Line = Data.TimeSteps[t].ToString("dd-MM-yyyy HH:mm:ss");
            DenseMatrix dmd = Data.GetData(t, 1);

            //Count the cells above the lower level per grid code
            for (int k = 0; k < dmd.Data.Count(); k++)
            {
                if (dmd.Data[k] > LowerLevel)
                {
                    dmout.Data[k] = dmout.Data[k] + 1;
                    GridAreas[dmg.Data[k]]++;
                }
            }

            //Build line
            foreach (var kvp in GridAreas)
                Line += "\t" + (int)kvp.Key + "\t" + kvp.Value + "\t" + kvp.Value * Math.Pow((float)Data.GridSize, 2);
            sw.WriteLine(Line);

            //Reset
            foreach (var k in keys)
                GridAreas[k] = 0;
        }
        dfsout.SetData(0, 1, dmout);
    }
    Data.Dispose();
    Grid.Dispose();
    dfsout.Dispose();
}
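// Example invocation (assumed, not from the source; the executable name is a placeholder):
//   WaterOnTerrain.exe waterdepth.dfs2 gridcodes.dfs2 0.05
// writes WaterOnTerrain.txt and WaterOnTerrain.dfs2 to the folder of the first file, reporting
// per grid code the number of cells (and their area) where the value exceeds the lower level.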
public void PercentileTest2()
{
    DFSBase target = DfsFileFactory.OpenFile(@"..\..\..\TestData\TestDataSet.dfs2");

    double[] Percentiles = new double[] { 0.1, 0.5, 0.9 };

    DFSBase outf = DfsFileFactory.CreateFile(@"..\..\..\TestData\TestDataSet_percentiles_limit.dfs2", Percentiles.Count());
    DFSBase outf2 = DfsFileFactory.CreateFile(@"..\..\..\TestData\TestDataSet_percentiles.dfs2", Percentiles.Count());
    outf.CopyFromTemplate(target);
    outf2.CopyFromTemplate(target);

    int Item = 1;
    int k = 0;
    //Create the items
    foreach (double j in Percentiles)
    {
        outf.Items[k].EumItem = target.Items[Item - 1].EumItem;
        outf.Items[k].EumUnit = target.Items[Item - 1].EumUnit;
        outf.Items[k].Name = j.ToString() + " Percentile";
        outf2.Items[k].EumItem = target.Items[Item - 1].EumItem;
        outf2.Items[k].EumUnit = target.Items[Item - 1].EumUnit;
        outf2.Items[k].Name = j.ToString() + " Percentile";
        k++;
    }

    int[] TSteps = new int[target.NumberOfTimeSteps];
    for (int i = 0; i < target.NumberOfTimeSteps; i++)
        TSteps[i] = i;

    Stopwatch sw = new Stopwatch();
    sw.Start();
    target.Percentile(Item, TSteps, outf, Percentiles, 10);
    sw.Stop();
    TimeSpan el = sw.Elapsed;

    sw.Reset();
    sw.Start();
    target.Percentile(Item, TSteps, outf2, Percentiles);
    sw.Stop();
    TimeSpan el2 = sw.Elapsed;

    outf.Dispose();
    outf2.Dispose();
    target.Dispose();

    DFS2 fil1 = new DFS2(@"..\..\..\TestData\TestDataSet_percentiles_limit.dfs2");
    DFS2 fil2 = new DFS2(@"..\..\..\TestData\TestDataSet_percentiles.dfs2");

    for (int i = 1; i <= Percentiles.Count(); i++)
    {
        var m1 = fil1.GetData(0, i);
        var m2 = fil2.GetData(0, i);
        for (int j = 0; j < m1.Data.Count(); j++)
            Assert.AreEqual(m1.Data[j], m2.Data[j]);
    }
    fil1.Dispose();
    fil2.Dispose();
}
public static void InsertPointValues(XElement OperationData)
{
    string filename = OperationData.Element("DFSFileName").Value;
    int Item = OperationData.Element("Item") == null ? 1 : int.Parse(OperationData.Element("Item").Value);
    bool ClearValues = OperationData.Element("ClearValues") == null ? true : bool.Parse(OperationData.Element("ClearValues").Value);

    // Read the points as (X, Y, Z, TimeStep, Value); X, Y, Z and TimeStep are optional
    List<Tuple<double, double, int, int, double>> points = new List<Tuple<double, double, int, int, double>>();
    foreach (var p in OperationData.Element("Points").Elements())
    {
        Tuple<double, double, int, int, double> point = new Tuple<double, double, int, int, double>(
            p.Element("X") == null ? -1 : double.Parse(p.Element("X").Value),
            p.Element("Y") == null ? -1 : double.Parse(p.Element("Y").Value),
            p.Element("Z") == null ? 0 : int.Parse(p.Element("Z").Value),
            p.Element("TimeStep") == null ? 0 : int.Parse(p.Element("TimeStep").Value),
            double.Parse(p.Element("Value").Value));
        points.Add(point);
    }

    // Dfs0: set the value directly on the item at the time step
    if (Path.GetExtension(filename).EndsWith("0"))
    {
        using (DFS0 dfs = new DFS0(filename))
        {
            if (ClearValues)
                for (int i = 0; i < dfs.NumberOfTimeSteps; i++)
                    dfs.SetData(i, Item, 0);

            foreach (var p in points)
                dfs.SetData(p.Item4, Item, p.Item5);
        }
    }
    // Dfs2: set the value in the cell containing the X- and Y-coordinates
    else if (Path.GetExtension(filename).EndsWith("2"))
    {
        using (DFS2 dfs = new DFS2(filename))
        {
            if (ClearValues)
                for (int i = 0; i < dfs.NumberOfTimeSteps; i++)
                    dfs.SetData(i, Item, new DenseMatrix(dfs.NumberOfRows, dfs.NumberOfColumns));

            foreach (var p in points)
            {
                var data = dfs.GetData(p.Item4, Item);
                int column = dfs.GetColumnIndex(p.Item1);
                int row = dfs.GetRowIndex(p.Item2);
                if (column >= 0 && row >= 0)
                    data[row, column] = p.Item5;
                dfs.SetData(p.Item4, Item, data);
            }
        }
    }
    // Dfs3: as dfs2, but with the Z-element as the layer index
    else if (Path.GetExtension(filename).EndsWith("3"))
    {
        using (DFS3 dfs = new DFS3(filename))
        {
            if (ClearValues)
                for (int i = 0; i < dfs.NumberOfTimeSteps; i++)
                    dfs.SetData(i, Item, new Matrix3d(dfs.NumberOfRows, dfs.NumberOfColumns, dfs.NumberOfLayers));

            foreach (var p in points)
            {
                var data = dfs.GetData(p.Item4, Item);
                int column = dfs.GetColumnIndex(p.Item1);
                int row = dfs.GetRowIndex(p.Item2);
                if (column >= 0 && row >= 0)
                    data[row, column, p.Item3] = p.Item5;
                dfs.SetData(p.Item4, Item, data);
            }
        }
    }
}
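// A minimal usage sketch (not from the source) of the OperationData element that InsertPointValues
// expects, using the element names the method reads. File name, coordinates and values are
// placeholders; the names of the root element and of each child under Points are not inspected,
// and Item, ClearValues, X, Y, Z and TimeStep are optional with the defaults seen above.
XElement op = new XElement("InsertPointValues",
    new XElement("DFSFileName", @"model.dfs2"),
    new XElement("Item", "1"),
    new XElement("ClearValues", "true"),
    new XElement("Points",
        new XElement("Point",
            new XElement("X", "456198"),
            new XElement("Y", "6272321"),
            new XElement("TimeStep", "0"),
            new XElement("Value", "2.5"))));
InsertPointValues(op);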