// Writes dynamic data for this item and the given time step to the file.
// NB: items must be written (i.e. this function called) in item order!
public int WriteDynData(double timeStep, float[] data)
{
    if (Items.Length < 1)
    {
        return _err("Add dynamic items before calling writeDynData.");
    }

    if (!compressed)
    {
        // Uncompressed files take the raw array as-is.
        DfsDLLWrapper.dfsWriteItemTimeStep(pHeader, pFile, timeStep, data);
        return 0;
    }

    // Compressed write: every dynamic item must share the grid of the first item.
    int countX = Items[0].nPointsX;
    int countY = Items[0].nPointsY;
    for (int item = 1; item < Items.Length; item++)
    {
        if (Items[item].nPointsX != countX || Items[item].nPointsY != countY)
        {
            return _err("writeDynData, compressed data: not all dynamic items have same length", -2);
        }
    }

    // Pick the stored values out of the full grid via the encode keys
    // (one x/y/z index triple per stored point).
    float[] packed = new float[compress_XKey.Length];
    for (int p = 0; p < packed.Length; p++)
    {
        packed[p] = data[compress_ZKey[p] * countY * countX + compress_YKey[p] * countX + compress_XKey[p]];
    }
    DfsDLLWrapper.dfsWriteItemTimeStep(pHeader, pFile, timeStep, packed);
    return 0;
}
/// <summary>
/// Writes this item's EUM info and space axis onto the given dynamic item pointer.
/// Returns 0 on success, or an error code for unsupported axis types.
/// </summary>
public int Write(IntPtr pItem)
{
    DfsDLLWrapper.dfsSetItemInfo(fileInfoRef.pHeader, pItem, (int)this.EUMType, this.Name, (int)this.EUMUnit, this.dataType);

    switch (this.sAxisType)
    {
        case SpaceAxisType.EqD0:
            DfsDLLWrapper.dfsSetItemAxisEqD0(pItem, (int)axisEUMUnit);
            return 0;
        case SpaceAxisType.EqD1:
            DfsDLLWrapper.dfsSetItemAxisEqD1(pItem, (int)axisEUMUnit, nPointsX, XMinLimit, DX);
            return 0;
        case SpaceAxisType.EqD2:
            DfsDLLWrapper.dfsSetItemAxisEqD2(pItem, (int)axisEUMUnit, nPointsX, nPointsY, XMinLimit, YMinLimit, DX, DY);
            return 0;
        case SpaceAxisType.EqD3:
            DfsDLLWrapper.dfsSetItemAxisEqD3(pItem, (int)axisEUMUnit, nPointsX, nPointsY, nPointsZ, XMinLimit, YMinLimit, ZMinLimit, DX, DY, DZ);
            return 0;
        default:
            return _err("write does not support space axis " + sAxisType.ToString());
    }
}
/// <summary>
/// Writes the time axis (start time and time step) to the header.
/// Only calendar axes are written; other axis types are left untouched.
/// </summary>
protected void WriteTime()
{
    if (!_initializedForWriting)
    {
        InitializeForWriting();
    }

    if (_timeAxis == TimeAxisType.CalendarEquidistant)
    {
        DfsDLLWrapper.dfsSetEqCalendarAxis(_headerPointer, TimeSteps.First().ToString("yyyy-MM-dd"), TimeSteps.First().ToString("HH:mm:ss"), (int)timeStepUnit, 0, _timeStep.TotalSeconds, 0);
    }
    else if (_timeAxis == TimeAxisType.CalendarNonEquidistant)
    {
        DfsDLLWrapper.dfsSetNeqCalendarAxis(_headerPointer, TimeSteps.First().ToString("yyyy-MM-dd"), TimeSteps.First().ToString("HH:mm:ss"), (int)timeStepUnit, 0, 0);
    }
    // TimeEquidistant, TimeNonEquidistant and Undefined: nothing to write.
}
/// <summary>
/// Moves the file cursor to the given time step and item.
/// Returns true if it was actually necessary to move.
/// Note that it is not possible to move backwards into something that has been written without
/// </summary>
/// <param name="TimeStep"></param>
/// <param name="Item"></param>
/// <returns></returns>
private bool MoveToItemTimeStep(int TimeStep, int Item)
{
    // Clamp so we never try to seek beyond what exists in the file.
    TimeStep = Math.Min(TimeStep, NumberOfTimeStepsWritten);
    Item = Math.Min(Item, NumberOfItems);
    if (TimeStep != _currentTimeStep || Item != _currentItem)
    {
        _currentTimeStep = TimeStep;
        _currentItem = Item;
        if (TimeStep == NumberOfTimeStepsWritten)
        {
            // Target is one past the last written step: spool to the very end of the file.
            DfsDLLWrapper.dfsFindItemDynamic(_headerPointer, _filePointer, TimeStep - 1, NumberOfItems); //Spools to last item
            DfsDLLWrapper.dfsSkipItem(_headerPointer, _filePointer); // now at end
            _currentItem = 1; // the next write starts at the first item of the new step
            return (true);
        }
        else
        {
            //Spools to the correct Item and TimeStep
            DfsDLLWrapper.dfsFindItemDynamic(_headerPointer, _filePointer, TimeStep, Item);
            return (true);
        }
    }
    return (false);
}
/// <summary>
/// Creates a new 2-D dfs file base: an equidistant-time, fixed-space header
/// with the requested number of items and a calendar-equidistant time axis.
/// </summary>
public DFS2DBase(string DFSFileName, int NumberOfItems)
    : base(DFSFileName, NumberOfItems)
{
    // Build the native header up front; the file itself is created later.
    _headerPointer = DfsDLLWrapper.dfsHeaderCreate(FileType.EqtimeFixedspaceAllitems, "Title", "HydroNumerics", 1, NumberOfItems, StatType.NoStat);
    _timeAxis = TimeAxisType.CalendarEquidistant;
}
/// <summary>
/// Closes the native file and destroys the header. Always returns 0.
/// </summary>
public int Close()
{
    DfsDLLWrapper.dfsFileClose(pHeader, ref pFile);
    DfsDLLWrapper.dfsHeaderDestroy(ref pHeader);
    return 0;
}
/// <summary>
/// Reads data for the TimeStep and Item into the shared buffer and returns it.
/// Time steps count from 0 and Item from 1.
/// </summary>
/// <param name="TimeStep"></param>
/// <param name="Item"></param>
public float[] ReadItemTimeStep(int TimeStep, int Item)
{
    MoveToItemTimeStep(TimeStep, Item);
    double time; // time stamp of the step; not used by this method
    DfsDLLWrapper.dfsReadItemTimeStep(_headerPointer, _filePointer, out time, dfsdata);
    IncrementItemTimeStep();
    return dfsdata;
}
/// <summary>
/// Writes the item's EUM type/unit, name and value type to the header.
/// The data type is always written as float.
/// </summary>
internal void WriteItemInfo(Item I)
{
    if (!_initializedForWriting)
    {
        InitializeForWriting();
    }
    DfsDLLWrapper.dfsSetItemInfo(_headerPointer, I.ItemPointer, (int)I.EumItem, I.Name, (int)I.EumUnit, DfsSimpleType.Float);
    DfsDLLWrapper.dfsSetItemValueType(I.ItemPointer, I.ValueType);
}
/// <summary>
/// Creates a new dfs0 file: non-equidistant calendar time axis and a 0-D space axis,
/// with an empty in-memory time series list.
/// </summary>
public DFS0(string DFSFileName, int NumberOfItems)
    : base(DFSFileName, NumberOfItems)
{
    _headerPointer = DfsDLLWrapper.dfsHeaderCreate(FileType.NeqtimeFixedspaceAllitems, "Title", "HydroNumerics", 1, NumberOfItems, StatType.RegularStat);
    _timeAxis = TimeAxisType.CalendarNonEquidistant;
    _spaceAxis = SpaceAxisType.EqD0;
    Data = new SortedList<DateTime, DenseVector>();
}
/// <summary>
/// Positions the file at the given static item, loads its vector and reads its header info.
/// </summary>
public int ReadStatic(int staticItemNo)
{
    DfsDLLWrapper.dfsFindItemStatic(fileInfoRef.pHeader, fileInfoRef.pFile, staticItemNo);
    staticVectorPtr = DfsDLLWrapper.dfsStaticRead(fileInfoRef.pFile);
    return Read(DfsDLLWrapper.dfsItemS(staticVectorPtr));
}
/// <summary>
/// Releases resources. When disposing is true the managed data buffer is dropped;
/// the native file handle is closed in either case.
/// </summary>
/// <param name="disposing">True when called from Dispose(), false from a finalizer or an internal reopen.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        dfsdata = null;
    }
    if (_headerPointer != IntPtr.Zero)
    {
        DfsDLLWrapper.dfsFileClose(_headerPointer, ref _filePointer);
        // Clear the handle so a second Dispose cannot close the same native
        // header twice. Callers that reopen (e.g. InitializeForWriting)
        // reassign _headerPointer immediately afterwards.
        _headerPointer = IntPtr.Zero;
    }
}
/// <summary>
/// Opens the file for writing. First closes the file since it has already been opened
/// for reading, then refreshes the item pointers from the reopened header.
/// </summary>
protected void InitializeForWriting()
{
    Dispose(false);
    DfsDLLWrapper.dfsFileEdit(_filename, out _headerPointer, out _filePointer);
    _initializedForWriting = true;
    // Item numbers are 1-based in the dfs API.
    for (int itemNumber = 1; itemNumber <= NumberOfItems; itemNumber++)
    {
        Items[itemNumber - 1].ItemPointer = DfsDLLWrapper.dfsItemD(_headerPointer, itemNumber);
    }
}
/// <summary>
/// Creates a new static vector and writes this item's header information to it.
/// Returns the result code of the header write.
/// </summary>
public int WriteStatic(int itemNo)
{
    staticVectorPtr = DfsDLLWrapper.dfsStaticCreate();
    IntPtr staticItem = DfsDLLWrapper.dfsItemS(staticVectorPtr);
    // Write header information; a non-zero code is passed straight back to the caller.
    return Write(staticItem);
}
/// <summary>
/// Writes data for the TimeStep and Item
/// </summary>
/// <param name="TimeStep"></param>
/// <param name="Item"></param>
public void WriteItemTimeStep(int TimeStep, int Item, float[] data)
{
    if (!_initializedForWriting)
    {
        InitializeForWriting();
    }
    if (_filePointer == IntPtr.Zero)
    {
        // No file on disk yet: build the header and create it first.
        CreateFile();
    }
    MoveToItemTimeStep(TimeStep, Item);
    double time = 0;
    // For non-equidistant calendar axes the time stamp written is the offset from the
    // first time step, converted to the file's time unit.
    if (_timeAxis == TimeAxisType.CalendarNonEquidistant & _currentTimeStep > 0)
    {
        TimeSpan ts = TimeSteps[_currentTimeStep].Subtract(TimeSteps[0]);
        switch (timeStepUnit)
        {
            case TimeInterval.Second:
                time = ts.TotalSeconds;
                break;
            case TimeInterval.Minute:
                time = ts.TotalMinutes;
                break;
            case TimeInterval.Hour:
                time = ts.TotalHours;
                break;
            default:
                break;
        }
    }
    if (EndOfFile)
    {
        // Appending a new time step: bump the written-step counter once the last item
        // of the step is reached, and extend the in-memory time-step list.
        if (_currentItem == NumberOfItems)
        {
            NumberOfTimeStepsWritten++;
        }
        AppendTimeStep(TimeSteps.Last().Add(_timeStep));
    }
    //Writes the data
    DfsDLLWrapper.dfsWriteItemTimeStep(_headerPointer, _filePointer, time, data);
    IncrementItemTimeStep();
}
// Reads dynamic data for the given item number and time step into the output array,
// and returns the time offset of that time step (from the start of the file) in dT.
public int ReadDynData(int timeStep, int itemNo, out float[] data, out double dT)
{
    data = new float[Items[itemNo - 1].TotNoPoints];
    DfsDLLWrapper.dfsFindItemDynamic(pHeader, pFile, timeStep, itemNo); // position at the item
    // dfsReadItemTimeStep always returns uncompressed data
    if (!DfsDLLWrapper.dfsReadItemTimeStep(pHeader, pFile, out dT, data))
    {
        throw new Exception("dfsReadItemTimeStep fail.");
    }
    return 0;
}
/// <summary>
/// Writes the projection/origin info to the header and, for 2-D space axes,
/// each item's info and equidistant axis.
/// </summary>
protected void WriteGeoInfo()
{
    if (!_initializedForWriting)
    {
        InitializeForWriting();
    }
    DfsDLLWrapper.dfsSetGeoInfoUTMProj(_headerPointer, "NON-UTM", _xOrigin, _yOrigin, _orientation);
    foreach (Item item in Items)
    {
        WriteItemInfo(item);
        if (_spaceAxis == SpaceAxisType.EqD2)
        {
            DfsDLLWrapper.dfsSetItemAxisEqD2(item.ItemPointer, 1000, _numberOfColumns, _numberOfRows, 0, 0, (float)_gridSize, (float)_gridSize);
        }
    }
}
/// <summary>
/// Reads item info (EUM type/unit, name, data type) and space-axis info from the
/// given item pointer into this object. Returns 0 on success, or an error code
/// for unsupported axis types.
/// </summary>
public int Read(IntPtr pItem)
{
    int eumT = 0, eumU = 0;
    DfsSimpleType dataT = DfsSimpleType.Int;
    DfsDLLWrapper.dfsGetItemInfo(pItem, out eumT, out EUMTypeString, out Name, out eumU, out EUMUnitString, out dataT);
    EUMType = (eumItem)eumT;
    EUMUnit = (eumUnit)eumU;
    dataType = (DfsSimpleType)dataT;
    //if (dataType != UfsSimpleType.UFS_FLOAT)return err("Only float dataType supported.");
    dim = DfsDLLWrapper.dfsGetItemDim(pItem);
    dataValType = DfsDLLWrapper.dfsGetItemValueType(pItem);
    sAxisType = (SpaceAxisType)DfsDLLWrapper.dfsGetItemAxisType(pItem);
    // NB: eumU is reused below as the AXIS unit out-parameter; it is converted to
    // axisEUMUnit after the switch (skipped for unsupported axes, which return early).
    switch (sAxisType)
    {
        case SpaceAxisType.EqD0:
            DfsDLLWrapper.dfsGetItemAxisEqD0(pItem, out eumU, out axisEUMUnitString);
            nPointsX = 1; // a 0-D axis holds a single point
            break;
        case SpaceAxisType.EqD1:
            DfsDLLWrapper.dfsGetItemAxisEqD1(pItem, out eumU, out axisEUMUnitString, out m_nPointsX, out XMinLimit, out DX);
            break;
        case SpaceAxisType.EqD2:
            DfsDLLWrapper.dfsGetItemAxisEqD2(pItem, out eumU, out axisEUMUnitString, out m_nPointsX, out m_nPointsY, out XMinLimit, out YMinLimit, out DX, out DY);
            break;
        case SpaceAxisType.EqD3:
            DfsDLLWrapper.dfsGetItemAxisEqD3(pItem, out eumU, out axisEUMUnitString, out m_nPointsX, out m_nPointsY, out m_nPointsZ, out XMinLimit, out YMinLimit, out ZMinLimit, out DX, out DY, out DZ);
            break;
        default:
            return (_err("Unsupported space axis " + sAxisType.ToString()));
    }
    axisEUMUnit = (eumUnit)eumU;
    return (0);
}
// Writes the static data to the file and releases the static vector.
// NB: call DFSWrapper.dfsFileCreate before this function (or else there is no file to write data to).
public int WriteStaticData()
{
    if (dataType == DfsSimpleType.Float)
    {
        DfsDLLWrapper.dfsStaticWrite(staticVectorPtr, fileInfoRef.pFile, staticDataFloat);
    }
    else if (dataType == DfsSimpleType.Int)
    {
        DfsDLLWrapper.dfsStaticWrite(staticVectorPtr, fileInfoRef.pFile, staticDataInt);
    }
    else
    {
        return _err("Unsupported static datatype (" + dataType.ToString() + ")", -16);
    }
    DfsDLLWrapper.dfsStaticDestroy(ref staticVectorPtr);
    return 0;
}
//Gets the timespan for a time step using readitemtimestep. Should only be used with CalendarNonEquidistant
protected TimeSpan GetTimeSpan(int TimeStep)
{
    TimeSpan ts = TimeSpan.Zero;
    double time = 0;
    // Spool forward (the cursor only moves forward) until the requested step is current.
    while (_currentTimeStep < TimeStep)
    {
        IncrementItemTimeStep();
        DfsDLLWrapper.dfsSkipItem(_headerPointer, _filePointer);
    }
    // Read one item to obtain the time stamp of this step.
    DfsDLLWrapper.dfsReadItemTimeStep(_headerPointer, _filePointer, out time, dfsdata);
    IncrementItemTimeStep();
    // Convert the raw time value according to the file's time unit.
    switch (this.timeStepUnit)
    {
        case TimeInterval.Second:
            ts = TimeSpan.FromSeconds(time);
            break;
        case TimeInterval.Minute:
            ts = TimeSpan.FromMinutes(time);
            break;
        case TimeInterval.Hour:
            ts = TimeSpan.FromHours(time);
            break;
        case TimeInterval.Day:
            // NOTE(review): Day/Month/Year units fall through and return TimeSpan.Zero —
            // confirm whether that is intended.
            break;
        case TimeInterval.Month:
            break;
        case TimeInterval.Year:
            break;
        default:
            break;
    }
    return (ts);
}
/// <summary>
/// Copies the static data out of the native vector into the typed array for this
/// item's data type, then releases the vector.
/// </summary>
public int ReadStaticData()
{
    if (dataType == DfsSimpleType.Float)
    {
        staticDataFloat = new float[TotNoPoints];
        DfsDLLWrapper.dfsStaticGetData(staticVectorPtr, staticDataFloat);
    }
    else if (dataType == DfsSimpleType.Int)
    {
        staticDataInt = new int[TotNoPoints];
        DfsDLLWrapper.dfsStaticGetData(staticVectorPtr, staticDataInt);
    }
    else
    {
        return _err("Unsupported static datatype (" + dataType.ToString() + ")", -16);
    }
    DfsDLLWrapper.dfsStaticDestroy(ref staticVectorPtr);
    return 0;
}
/// <summary>
/// Copies the time axis, delete value and (for compressed files) the encode keys
/// from a template file into this file.
/// </summary>
public virtual void CopyFromTemplate(DFSBase dfs)
{
    _timeAxis = dfs._timeAxis;
    bool equidistant = dfs._timeAxis == TimeAxisType.CalendarEquidistant
                    || dfs._timeAxis == TimeAxisType.TimeEquidistant;
    if (equidistant)
    {
        this.TimeOfFirstTimestep = dfs.TimeOfFirstTimestep;
        this.TimeStep = dfs.TimeStep;
    }
    this.DeleteValue = dfs.DeleteValue;
    if (DfsDLLWrapper.dfsIsFileCompressed(dfs._headerPointer))
    {
        var keyCount = DfsDLLWrapper.dfsGetEncodeKeySize(dfs._headerPointer);
        int[] xkey = new int[keyCount];
        int[] ykey = new int[keyCount];
        int[] zkey = new int[keyCount];
        DfsDLLWrapper.dfsGetEncodeKey(dfs._headerPointer, xkey, ykey, zkey);
        DfsDLLWrapper.dfsSetEncodeKey(_headerPointer, xkey, ykey, zkey, keyCount);
    }
}
/// <summary>
/// Copies the time axis, delete value and (for compressed files) the encode keys
/// from a template file. When the template is layered (dfs3) and this file is
/// single-layer (dfs2), the keys are reduced to the first layer.
/// </summary>
public virtual void CopyFromTemplate(DFSBase dfs)
{
    _timeAxis = dfs._timeAxis;
    bool equidistant = dfs._timeAxis == TimeAxisType.CalendarEquidistant
                    || dfs._timeAxis == TimeAxisType.TimeEquidistant;
    if (equidistant)
    {
        this.TimeOfFirstTimestep = dfs.TimeOfFirstTimestep;
        this.TimeStep = dfs.TimeStep;
    }
    this.DeleteValue = dfs.DeleteValue;
    if (DfsDLLWrapper.dfsIsFileCompressed(dfs._headerPointer))
    {
        var en = DfsDLLWrapper.dfsGetEncodeKeySize(dfs._headerPointer);
        int[] xkey = new int[en];
        int[] ykey = new int[en];
        int[] zkey = new int[en];
        DfsDLLWrapper.dfsGetEncodeKey(dfs._headerPointer, xkey, ykey, zkey);
        //Adjust z-count if we go from dfs3 to dfs2
        if (_numberOfLayers == 1 && dfs._numberOfLayers > 1)
        {
            en = en / dfs._numberOfLayers;
            xkey = xkey.Take(en).ToArray();
            ykey = ykey.Take(en).ToArray();
            zkey = new int[en]; // all z-indices collapse to layer 0
        }
        DfsDLLWrapper.dfsSetEncodeKey(_headerPointer, xkey, ykey, zkey, en);
    }
}
/// <summary>
/// Builds the full header (geo info, time axis, item info and space axes)
/// and creates the file on disk.
/// </summary>
private void CreateFile()
{
    WriteGeoInfo();
    WriteTime();
    foreach (Item item in Items)
    {
        WriteItemInfo(item);
        switch (_spaceAxis)
        {
            case SpaceAxisType.EqD2:
                DfsDLLWrapper.dfsSetItemAxisEqD2(item.ItemPointer, 1000, _numberOfColumns, _numberOfRows, 0, 0, (float)_gridSize, (float)_gridSize);
                break;
            case SpaceAxisType.EqD3:
                DfsDLLWrapper.dfsSetItemAxisEqD3(item.ItemPointer, 1000, _numberOfColumns, _numberOfRows, _numberOfLayers, 0, 0, 0, (float)_gridSize, (float)_gridSize, (float)_gridSize);
                break;
            case SpaceAxisType.EqD0:
                DfsDLLWrapper.dfsSetItemAxisEqD0(item.ItemPointer, 1000);
                break;
        }
    }
    DfsDLLWrapper.dfsFileCreate(FileName, _headerPointer, out _filePointer);
}
// Builds a dfs header from this object's fields and writes the complete file:
// custom blocks, projection, time axis, compression keys, dynamic and static items.
// Returns 0 on success or the first non-zero item return code.
public int WriteToFile(string dfsFileName)
{
    //create header
    FileType d = dfsFileType;
    pHeader = DfsDLLWrapper.dfsHeaderCreate(d, FileTitle, "DfsFileInfo", 1, Items.Length, statType);
    DfsDLLWrapper.dfsSetDataType(pHeader, DataType);
    //delval
    DfsDLLWrapper.dfsSetDeleteValFloat(pHeader, delVal);
    int rc = 0;
    // Custom blocks: only the dfsu (MIKE_FM) and M21 (M21_Misc) layouts are written.
    switch (CustomBlockName)
    {
        case "MIKE_FM":
            // dfsu layout: node count, element count, dimension, layer count.
            int[] dfsuCBData = new int[4];
            dfsuCBData[0] = dfsuCustBlock.NoNodesTot;
            dfsuCBData[1] = dfsuCustBlock.NoElemTot;
            dfsuCBData[2] = dfsuCustBlock.Dim;
            dfsuCBData[3] = dfsuCustBlock.NoLayers;
            DfsDLLWrapper.dfsAddCustomBlock(pHeader, "MIKE_FM", dfsuCBData);
            break;
        case "M21_Misc":
            // M21 layout: orientation followed by six opaque floats.
            float[] dM21CBData = new float[7];
            dM21CBData[0] = (float)Orientation;
            dM21CBData[1] = m21CustBlock.f1;
            dM21CBData[2] = m21CustBlock.f2;
            dM21CBData[3] = m21CustBlock.f3;
            dM21CBData[4] = m21CustBlock.f4;
            dM21CBData[5] = m21CustBlock.f5;
            dM21CBData[6] = m21CustBlock.f6;
            DfsDLLWrapper.dfsAddCustomBlock(pHeader, "M21_Misc", dM21CBData);
            break;
        case "Unknown":
            break;
        default:
            // Unsupported custom block: silently skipped.
            //JdfsMisc.log("Warning: unsupported CustomBlockName encountered (" + CustomBlockName + "). Custom block not written.");
            break;
    }
    //projection
    if (Projection_type == ProjectionType.Projection)
    {
        DfsDLLWrapper.dfsSetGeoInfoUTMProj(pHeader, Projection, Longitude, Latitude, Orientation);
    }
    //timeaxis
    switch (this.tAxisType)
    {
        case TimeAxisType.CalendarEquidistant:
            DfsDLLWrapper.dfsSetEqCalendarAxis(pHeader, this.tAxis_StartDateStr, this.tAxis_StartTimeStr, (int)tAxis_EUMUnit, tAxis_dTStart, this.tAxis_dTStep, this.tAxis_indexTStart);
            break;
        case TimeAxisType.CalendarNonEquidistant:
            DfsDLLWrapper.dfsSetNeqCalendarAxis(pHeader, this.tAxis_StartDateStr, this.tAxis_StartTimeStr, (int)tAxis_EUMUnit, this.tAxis_dTStart, this.tAxis_indexTStart);
            break;
        default:
            _err("write of " + tAxisType.ToString() + " not supported");
            break;
    }
    // Compression: keys must be non-empty and of equal length, otherwise it is disabled.
    if (compressed)
    {
        if ((compress_XKey.Length < 1) || (compress_XKey.Length != compress_YKey.Length || compress_XKey.Length != compress_ZKey.Length))
        {
            _err("Compress keys does not have same length or is empty. Compression disabled.");
            compressed = false;
        }
        else
        {
            DfsDLLWrapper.dfsItemEnableCompression(pHeader);
            DfsDLLWrapper.dfsSetEncodeKey(pHeader, compress_XKey, compress_YKey, compress_ZKey, compress_XKey.Length);
        }
    }
    //Dynamic Items (item numbers are 1-based)
    for (int i = 1; i < Items.Length + 1; i++)
    {
        Items[i - 1].fileInfoRef = this;
        rc = Items[i - 1].Write(i);
        if (rc != 0)
        {
            return (rc);
        }
    }
    //Static Items
    if (staticItems != null)
    {
        for (int i = 1; i < staticItems.Length + 1; i++)
        {
            staticItems[i - 1].fileInfoRef = this;
            rc = staticItems[i - 1].WriteStatic(i);
            if (rc != 0)
            {
                return (rc);
            }
        }
    }
    // Create the physical file now that the header is complete.
    pFile = (IntPtr)0;
    DfsDLLWrapper.dfsFileCreate(dfsFileName, pHeader, out pFile);
    //write static data
    if (staticItems != null && staticItems.Length > 0 && writeStaticDataOnWrite)
    {
        rc = WriteStaticData();
        if (rc != 0)
        {
            return (rc);
        }
    }
    m_fileName = dfsFileName;
    return (rc);
}
/// <summary>
/// Writes this item's info to dynamic item number itemNo in the header.
/// </summary>
public int Write(int itemNo)
{
    return Write(DfsDLLWrapper.dfsItemD(fileInfoRef.pHeader, itemNo));
}
/// <summary>
/// Converts the configured input to a dfs2 file: builds the header (projection,
/// equidistant calendar time axis, selected items with a 2-D equidistant axis),
/// creates the file and writes one float field per selected item per time step.
/// </summary>
private void _convert2Dfs2()
{
    IntPtr headerPointer = IntPtr.Zero;
    IntPtr filePointer = IntPtr.Zero;
    try
    {
        int maxTimeStep = _getTimeSteps();
        _customDFSGrid = false;

        // Create header
        System.Reflection.AssemblyName assName = this.GetType().Assembly.GetName();
        headerPointer = DfsDLLWrapper.dfsHeaderCreate(FileType.EqtimeFixedspaceAllitems,
            System.IO.Path.GetFileNameWithoutExtension(_settings.InputFileName), assName.Name,
            assName.Version.Major, _getItemNum(), StatType.NoStat);

        // Setup header: projection, delete value.
        DfsDLLWrapper.dfsSetDataType(headerPointer, 1);
        double x0 = 0, y0 = 0, dx = 0, dy = 0, j = 0, k = 0, lon0 = 0, lat0 = 0;
        _getGridOrigo(out x0, out y0, out dx, out dy, out j, out k, out lat0, out lon0);
        DfsDLLWrapper.dfsSetGeoInfoUTMProj(headerPointer, _settings.MZMapProjectionString, lon0, lat0, _settings.OverwriteRotation);
        DfsDLLWrapper.dfsSetDeleteValFloat(headerPointer, _fdel);

        List<DateTime> dateTimes = _util.GetTime(_settings.TimeAxisName);

        // Time step length in whole seconds (the last interval wins, as before).
        double timestepSec = 0;
        for (int timeSteps = 1; timeSteps < dateTimes.Count; timeSteps++)
        {
            timestepSec = Math.Round((dateTimes[timeSteps].ToOADate() - dateTimes[timeSteps - 1].ToOADate()) * 86400);
        }

        if (maxTimeStep <= 1)
        {
            // Single time step: no interval can be derived, fall back to the configured step length.
            DfsDLLWrapper.dfsSetEqCalendarAxis(headerPointer, dateTimes[0].ToString("yyyy-MM-dd"), dateTimes[0].ToString("HH:mm:ss"), (int)eumUnit.eumUsec, 0, _settings.TimeStepSeconds, 0);
        }
        else
        {
            DfsDLLWrapper.dfsSetEqCalendarAxis(headerPointer, dateTimes[0].ToString("yyyy-MM-dd"), dateTimes[0].ToString("HH:mm:ss"), (int)eumUnit.eumUsec, 0, (int)timestepSec, 0);
        }

        // Add items by looping through the selected variables.
        int selectedItemCount = 0;
        for (int itemCount = 0; itemCount < _settings.Variables.Count; itemCount++)
        {
            if (_settings.IsVariablesSelected[itemCount])
            {
                IntPtr itemPointer = DfsDLLWrapper.dfsItemD(headerPointer, selectedItemCount + 1);
                string itemName = _settings.Variables[itemCount];
                DfsDLLWrapper.dfsSetItemInfo(headerPointer, itemPointer, _settings.VariablesMappings[itemCount].EUMItemKey, _settings.VariablesMappings[itemCount].EUMItemDesc, _settings.VariablesMappings[itemCount].EUMMappedItemUnitKey, DfsSimpleType.Float);
                DfsDLLWrapper.dfsSetItemValueType(itemPointer, DataValueType.Instantaneous);
                // Negative grid spacing means the source axis runs in reverse order:
                // write with positive spacing and flip the data when reading.
                if (dx <= 0)
                {
                    dx = Math.Abs(dx);
                    _invertxData = true;
                }
                if (dy <= 0)
                {
                    dy = Math.Abs(dy);
                    _invertyData = true;
                }
                DfsDLLWrapper.dfsSetItemAxisEqD2(itemPointer, (int)eumUnit.eumUdegree, (int)j, (int)k, (float)x0, (float)y0, (float)dx, (float)dy);
                selectedItemCount++;
            }
        }

        // Create file
        DfsDLLWrapper.dfsFileCreate(_settings.OutputFileName, headerPointer, out filePointer);

        // Write data to file (time loop > item loop).
        for (int timeSteps = 0; timeSteps < dateTimes.Count; timeSteps++)
        {
            selectedItemCount = 0;
            for (int itemCount = 0; itemCount < _settings.Variables.Count; itemCount++)
            {
                if (_settings.IsVariablesSelected[itemCount])
                {
                    string itemName = _settings.Variables[itemCount];
                    double dTotalSeconds = (dateTimes[timeSteps].ToOADate() - dateTimes[0].ToOADate()) * 86400;
                    dTotalSeconds = Math.Round(dTotalSeconds, 0, MidpointRounding.AwayFromZero);
                    DfsDLLWrapper.dfsWriteItemTimeStep(headerPointer, filePointer, dTotalSeconds, _getFloatData(itemName, timeSteps, j, k, lat0, lon0, dx, dy));
                    selectedItemCount++;
                }
            }
        }
    }
    catch (Exception ex)
    {
        throw new Exception("Convert2Dfs2 Error: " + ex.Message);
    }
    finally
    {
        // BUG FIX: IntPtr is a value type, so the previous "null != filePointer"
        // comparisons were always true and the close/destroy calls ran even when
        // the handles were never created. Compare against IntPtr.Zero instead.
        if (filePointer != IntPtr.Zero)
        {
            DfsDLLWrapper.dfsFileClose(headerPointer, ref filePointer);
        }
        if (headerPointer != IntPtr.Zero)
        {
            DfsDLLWrapper.dfsHeaderDestroy(ref headerPointer);
        }
    }
}
/// <summary>
/// Opens an existing dfs-file and reads the header: item count, projection,
/// space axis, time axis and the list of time steps.
/// </summary>
/// <param name="DFSFileName"></param>
public DFSBase(string DFSFileName) : this()
{
    _filename = DFSFileName;
    AbsoluteFileName = Path.GetFullPath(DFSFileName);
    try
    {
        DfsDLLWrapper.dfsFileRead(AbsoluteFileName, out _headerPointer, out _filePointer);
    }
    catch (Exception e)
    {
        return; //Not a valid file.
    }
    NumberOfItems = DfsDLLWrapper.dfsGetNoOfItems(_headerPointer);
    string eum_unit = "";
    int unit = 0;
    int data_type = 0;
    int item_type = 0;
    float x = 0;
    float y = 0;
    float z = 0;
    float dx = 0;
    float dy = 0;
    float dz = 0;
    IntPtr name = new IntPtr();
    //Reads the projection
    LastStatus = dfsGetGeoInfoUTMProj(_headerPointer, ref name, ref _xOrigin, ref _yOrigin, ref _orientation);
    //Reads the space axis
    _spaceAxis = (SpaceAxisType)DfsDLLWrapper.dfsGetItemAxisType(FirstItem.ItemPointer);
    //Now read axes info dependent on the type of axis
    switch (_spaceAxis)
    {
        case SpaceAxisType.CurveLinearD2:
            break;
        case SpaceAxisType.CurveLinearD3:
            break;
        case SpaceAxisType.EqD0:
            break;
        case SpaceAxisType.EqD1:
            break;
        case SpaceAxisType.EqD2: //DFS2 from MikeShe
            DfsDLLWrapper.dfsGetItemAxisEqD2(FirstItem.ItemPointer, out item_type, out eum_unit, out _numberOfColumns, out _numberOfRows, out x, out y, out dx, out dy);
            break;
        case SpaceAxisType.EqD3: //DFS3 from MikeShe
            DfsDLLWrapper.dfsGetItemAxisEqD3(FirstItem.ItemPointer, out item_type, out eum_unit, out _numberOfColumns, out _numberOfRows, out _numberOfLayers, out x, out y, out z, out dx, out dy, out dz);
            break;
        case SpaceAxisType.NeqD1:
            var coords = new Coords[1];
            DfsDLLWrapper.dfsGetItemAxisNeqD1(FirstItem.ItemPointer, out unit, out eum_unit, out data_type, out coords);
            break;
        case SpaceAxisType.NeqD2:
            break;
        case SpaceAxisType.NeqD3:
            break;
        case SpaceAxisType.Undefined:
            break;
        default:
            break;
    }
    // NOTE(review): _gridSize remains 0 for axis types that do not set dx — confirm intended.
    _gridSize = dx;
    //Prepares an array of floats to receive the data
    dfsdata = new float[_numberOfColumns * _numberOfRows * _numberOfLayers];
    //Now look at time axis
    _timeAxis = (TimeAxisType)DfsDLLWrapper.dfsGetTimeAxisType(_headerPointer);
    string startdate = "";
    string starttime = "";
    double tstart = 0;
    double tstep = 0;
    int nt = 0;
    int tindex = 0;
    switch (_timeAxis)
    {
        case TimeAxisType.TimeEquidistant: //Some DFS2 here
            DfsDLLWrapper.dfsGetEqTimeAxis(_headerPointer, out unit, out eum_unit, out tstart, out tstep, out nt, out tindex);
            break;
        case TimeAxisType.CalendarEquidistant: //Dfs2 and dfs3 here
            DfsDLLWrapper.dfsGetEqCalendarAxis(_headerPointer, out startdate, out starttime, out unit, out eum_unit, out tstart, out tstep, out nt, out tindex);
            // Map the EUM time-unit code to a TimeSpan step length.
            if (unit == 1400)
            {
                _timeStep = TimeSpan.FromSeconds(tstep);
            }
            else if (unit == 1401) //This is a guess
            {
                _timeStep = TimeSpan.FromMinutes(tstep);
            }
            else if (unit == 1402)
            {
                _timeStep = TimeSpan.FromHours(tstep);
            }
            break;
        case TimeAxisType.TimeNonEquidistant: //This has not been tested
            DfsDLLWrapper.dfsGetNeqTimeAxis(_headerPointer, out unit, out eum_unit, out tstart, out tstep, out nt, out tindex);
            break;
        case TimeAxisType.CalendarNonEquidistant: //Only dfs0 can have varying time steps
            DfsDLLWrapper.dfsGetNeqCalendarAxis(_headerPointer, out startdate, out starttime, out unit, out eum_unit, out tstart, out tstep, out nt, out tindex);
            break;
        case TimeAxisType.Undefined:
            break;
        default:
            break;
    }
    NumberOfTimeStepsWritten = nt;
    timeStepUnit = (TimeInterval)unit;
    if (_timeAxis == TimeAxisType.CalendarNonEquidistant | _timeAxis == TimeAxisType.TimeEquidistant)
    {
        _times = new double[nt];
    }
    if (startdate != "" & starttime != "")
    {
        _timesteps.Add(DateTime.Parse(startdate).Add(TimeSpan.Parse(starttime)));
    }
    else //Time equidistant files enter here.
    {
        // Arbitrary start date used when the file carries no calendar information.
        _timesteps.Add(new DateTime(2002, 1, 1));
    }
    //Now build the list of timesteps
    for (int i = 1; i < nt; i++)
    {
        if (_timeAxis == TimeAxisType.CalendarNonEquidistant) //dfs0 with time varying.
        {
            _timesteps.Add(_timesteps[0].Add(GetTimeSpan(i)));
        }
        else
        {
            _timesteps.Add(_timesteps[i - 1].Add(_timeStep));
        }
    }
}
/// <summary>
/// Reads a dfs file header: compression keys, general file info, the first custom
/// block, time axis, projection and dynamic/static item headers (and optionally the
/// static data). Returns 0 on success, or an error code from _err.
/// </summary>
public int ReadDfsFile(string dfsFileName)
{
    int rc;
    DfsDLLWrapper.dfsFileRead(dfsFileName, out pHeader, out pFile);
    m_fileName = dfsFileName;

    // Compression: read the encode keys if any are present.
    compressed = (DfsDLLWrapper.dfsIsFileCompressed(pHeader));
    if (compressed)
    {
        encodeKeySize = DfsDLLWrapper.dfsGetEncodeKeySize(pHeader);
        if (encodeKeySize > 0)
        {
            compress_XKey = new int[encodeKeySize];
            compress_YKey = new int[encodeKeySize];
            compress_ZKey = new int[encodeKeySize];
            DfsDLLWrapper.dfsGetEncodeKey(pHeader, compress_XKey, compress_YKey, compress_ZKey);
        }
        else
        {
            compressed = false;
        }
    }

    // general info about file
    dfsFileType = (FileType)DfsDLLWrapper.dfsGetFileType(pHeader);
    DataType = DfsDLLWrapper.dfsGetDataType(pHeader);
    //delete value
    delVal = DfsDLLWrapper.dfsGetDeleteValFloat(pHeader);
    //statistics type
    statType = DfsDLLWrapper.dfsGetItemStatsType(pHeader);

    //Custom blocks (only the first block is read)
    DfsSimpleType iDataType = DfsSimpleType.Float;
    int iMiscVarNos = 0;
    IntPtr pData = pHeader;
    IntPtr pNextBlock = pHeader;
    IntPtr pBlock = DfsDLLWrapper.dfsGetCustomBlockRef(pHeader);
    // BUG FIX: compare against IntPtr.Zero instead of pBlock.ToInt32() != 0 —
    // ToInt32() throws OverflowException for 64-bit pointer values.
    if (pBlock != IntPtr.Zero)
    {
        DfsDLLWrapper.dfsGetCustomBlock(pBlock, out iDataType, out CustomBlockName, out iMiscVarNos, ref pData, out pNextBlock);
        switch (iDataType)
        {
            case DfsSimpleType.Float:
                custBlockDataFloat = new float[iMiscVarNos];
                Marshal.Copy(pData, custBlockDataFloat, 0, custBlockDataFloat.Length); // copy data from pointer to array
                break;
            case DfsSimpleType.Int:
                custBlockDataInt = new int[iMiscVarNos];
                Marshal.Copy(pData, custBlockDataInt, 0, custBlockDataInt.Length); // copy data from pointer to array
                break;
            default:
                throw new Exception("Unsupported CustomBlock data type encountered (" + iDataType + ").");
        }
        if (CustomBlockName == "MIKE_FM")
        {
            //dfsu custom block: node/element counts, dimension, layer count
            if (custBlockDataInt.Length > 0) { dfsuCustBlock.NoNodesTot = custBlockDataInt[0]; }
            if (custBlockDataInt.Length > 1) { dfsuCustBlock.NoElemTot = custBlockDataInt[1]; }
            if (custBlockDataInt.Length > 2) { dfsuCustBlock.Dim = custBlockDataInt[2]; }
            if (custBlockDataInt.Length > 3) { dfsuCustBlock.NoLayers = custBlockDataInt[3]; }
        }
        else if (CustomBlockName == "M21_Misc")
        {
            if (custBlockDataFloat.Length > 0) { m21CustBlock.ori = custBlockDataFloat[0]; } //m_LITOrientation
            if (custBlockDataFloat.Length > 1) { m21CustBlock.f1 = custBlockDataFloat[1]; }
            if (custBlockDataFloat.Length > 2) { m21CustBlock.f2 = custBlockDataFloat[2]; }
            if (custBlockDataFloat.Length > 3) { m21CustBlock.f3 = custBlockDataFloat[3]; } //m_LandValue
            if (custBlockDataFloat.Length > 4) { m21CustBlock.f4 = custBlockDataFloat[4]; }
            if (custBlockDataFloat.Length > 5) { m21CustBlock.f5 = custBlockDataFloat[5]; }
            if (custBlockDataFloat.Length > 6) { m21CustBlock.f6 = custBlockDataFloat[6]; } //m_GISLITOrientation
        }
    }

    //time axis
    tAxisType = (TimeAxisType)DfsDLLWrapper.dfsGetTimeAxisType(pHeader);
    switch (tAxisType)
    {
        case TimeAxisType.CalendarEquidistant:
            DfsDLLWrapper.dfsGetEqCalendarAxis(pHeader, out tAxis_StartDateStr, out tAxis_StartTimeStr, out tAxis_EUMUnit, out tAxis_EUMUnitStr, out tAxis_dTStart, out tAxis_dTStep, out tAxis_nTSteps, out tAxis_indexTStart);
            break;
        case TimeAxisType.Undefined:
            // Undefined axes are read as equidistant calendar axes.
            DfsDLLWrapper.dfsGetEqCalendarAxis(pHeader, out tAxis_StartDateStr, out tAxis_StartTimeStr, out tAxis_EUMUnit, out tAxis_EUMUnitStr, out tAxis_dTStart, out tAxis_dTStep, out tAxis_nTSteps, out tAxis_indexTStart);
            break;
        case TimeAxisType.CalendarNonEquidistant:
            DfsDLLWrapper.dfsGetNeqCalendarAxis(pHeader, out tAxis_StartDateStr, out tAxis_StartTimeStr, out tAxis_EUMUnit, out tAxis_EUMUnitStr, out tAxis_dTStart, out tAxis_dTStep, out tAxis_nTSteps, out tAxis_indexTStart);
            break;
        case TimeAxisType.TimeEquidistant:
            DfsDLLWrapper.dfsGetEqTimeAxis(pHeader, out tAxis_EUMUnit, out tAxis_EUMUnitStr, out tAxis_dTStart, out tAxis_dTStep, out tAxis_nTSteps, out tAxis_indexTStart);
            break;
        case TimeAxisType.TimeNonEquidistant:
            DfsDLLWrapper.dfsGetNeqTimeAxis(pHeader, out tAxis_EUMUnit, out tAxis_EUMUnitStr, out tAxis_dTStart, out tAxis_dTStep, out tAxis_nTSteps, out tAxis_indexTStart);
            break;
        default:
            return (_err(tAxisType.ToString() + " not supported"));
    }

    //Projection
    Projection_type = (ProjectionType)DfsDLLWrapper.dfsGetGeoInfoType(pHeader);
    if (Projection_type == ProjectionType.Projection)
    {
        DfsDLLWrapper.dfsGetGeoInfoUTMProj(pHeader, out Projection, out Longitude, out Latitude, out Orientation);
    }

    //Dynamic Items (1-based in the dfs API)
    int ItemCount = DfsDLLWrapper.dfsGetNoOfItems(pHeader);
    Items = new DfsItemInfo[ItemCount];
    for (int i = 1; i < Items.Length + 1; i++)
    {
        Items[i - 1] = new DfsItemInfo();
        Items[i - 1].fileInfoRef = this;
        Items[i - 1].Read(i); // reads header
    }

    //Static Items: probe until dfsFindItemStatic fails to count them
    rc = 0;
    int sItemNo = 0;
    while (true)
    {
        sItemNo++;
        try
        {
            DfsDLLWrapper.dfsFindItemStatic(pHeader, pFile, sItemNo);
        }
        catch
        {
            break; // no more static items
        }
    }
    if (sItemNo > 0)
    {
        // NOTE(review): the probe loop overshoots by one, so the number of found items
        // is sItemNo - 1; allocating sItemNo - 2 skips the last static item — confirm
        // whether the final static item is deliberately ignored.
        staticItems = new DfsItemInfo[sItemNo - 2];
        for (int i = 0; i < staticItems.Length; i++)
        {
            staticItems[i] = new DfsItemInfo();
            staticItems[i].fileInfoRef = this;
            rc = staticItems[i].ReadStatic(i + 1); // read header
        }
        if (readStaticDataOnRead)
        {
            rc = ReadStaticData();
        }
    }
    return (rc);
}
/// <summary>
/// Calculates the Percentiles [0;1] of an Item. MaxEntriesInMemory is used to reduce
/// the memory consumption by sweeping multiple times. Units is MB of memory.
/// Results are written into df as one item per percentile at time step 0.
/// </summary>
public void Percentile(int Item, int[] TSteps, DFSBase df, double[] Percentiles, int MaxEntriesInMemory)
{
    // One output field per requested percentile.
    float[][] OutData = new float[Percentiles.Count()][];
    for (int i = 0; i < Percentiles.Count(); i++)
    {
        OutData[i] = new float[dfsdata.Count()];
    }
    // Cell-index boundaries for each memory-limited sweep.
    List<int> steps = new List<int>();
    steps.Add(0);
    //Get the delete values
    float delete = DfsDLLWrapper.dfsGetDeleteValFloat(_headerPointer);
    //Read first time step and create a list with the indeces of non-delete values
    ReadItemTimeStep(0, Item);
    List<int> NonDeleteEntries = new List<int>();
    for (int i = 0; i < dfsdata.Length; i++)
    {
        if (dfsdata[i] != delete)
        {
            NonDeleteEntries.Add(i);
        }
    }
    //Find out how many sweeps are necessary to not exceed max memory
    // (40000 entries per MB assumed by this factor — TODO confirm)
    double TotalData = (double)NonDeleteEntries.Count * (double)TSteps.Count();
    if (TotalData > (MaxEntriesInMemory * 40000))
    {
        int nsteps = (int)Math.Max(TotalData / (MaxEntriesInMemory * 40000), 1);
        int StepLength = NonDeleteEntries.Count() / nsteps;
        for (int i = 0; i < nsteps; i++)
        {
            steps.Add(steps.Last() + StepLength);
        }
    }
    steps.Add(NonDeleteEntries.Count);
    //Now start the loop over sweeps
    for (int m = 0; m < steps.Count - 1; m++)
    {
        int dfscount = steps[m + 1] - steps[m];
        //First iterater is dfsdata
        float[][] Data = new float[dfscount][];
        for (int i = 0; i < Data.Count(); i++)
        {
            Data[i] = new float[TSteps.Count()];
        }
        //Collect all data (one file read per requested time step)
        for (int i = 0; i < TSteps.Count(); i++)
        {
            var data = ReadItemTimeStep(TSteps[i], Item);
            int local = 0;
            for (int k = steps[m]; k < steps[m + 1]; k++)
            {
                Data[local][i] = (dfsdata[NonDeleteEntries[k]]);
                local++;
            }
        }
        int local2 = 0;
        for (int k = steps[m]; k < steps[m + 1]; k++)
        {
            //Convert to doubles from float
            double[] ddata = new double[TSteps.Count()];
            for (int n = 0; n < TSteps.Count(); n++)
            {
                ddata[n] = Data[local2][n];
            }
            //Calculate the percentile
            MathNet.Numerics.Statistics.Percentile pCalc = new MathNet.Numerics.Statistics.Percentile(ddata);
            pCalc.Method = MathNet.Numerics.Statistics.PercentileMethod.Excel;
            var p = pCalc.Compute(Percentiles);
            for (int l = 0; l < Percentiles.Count(); l++)
            {
                OutData[l][NonDeleteEntries[k]] = (float)p[l];
            }
            local2++;
        }
    }
    //Insert deletevalues in output data
    for (int i = 0; i < dfsdata.Length; i++)
    {
        if (!NonDeleteEntries.Contains(i))
        {
            for (int l = 0; l < Percentiles.Count(); l++)
            {
                OutData[l][i] = delete;
            }
        }
    }
    //Set Item info on the output file (EUM copied from the source item)
    for (int i = 0; i < Percentiles.Count(); i++)
    {
        df.Items[i].EumItem = Items[Item - 1].EumItem;
        df.Items[i].EumUnit = Items[Item - 1].EumUnit;
        df.Items[i].Name = Percentiles[i].ToString() + " percentile";
    }
    for (int i = 0; i < Percentiles.Count(); i++)
    {
        df.WriteItemTimeStep(0, i + 1, OutData[i]);
    }
}