/// <summary>
/// Loads observed/simulated comparing value pairs from <see cref="FileName"/>.
/// File layout: one header line followed by _nsite lines, each with two floats.
/// </summary>
public void LoadComparingValues()
{
    OnLoading(0);
    int nstep = _nsite;
    int progress = 0;
    if (File.Exists(FileName))
    {
        // 'using' guarantees the reader is closed even if parsing throws
        // (the original leaked the stream on exception). The unused local
        // 'grid' from the original has been removed.
        using (StreamReader sr = new StreamReader(FileName))
        {
            ComparingValues = new DataCube<float>(2, 1, _nsite);
            ComparingValues.Allocate(0, 1, _nsite);
            ComparingValues.Allocate(1, 1, _nsite);
            // Skip the header line.
            string line = sr.ReadLine();
            for (int i = 0; i < _nsite; i++)
            {
                line = sr.ReadLine();
                if (!TypeConverterEx.IsNull(line))
                {
                    var vv = TypeConverterEx.Split<float>(line);
                    ComparingValues[0, 0, i] = vv[0];
                    ComparingValues[1, 0, i] = vv[1];
                }
                progress = Convert.ToInt32(i * 100 / nstep);
                OnLoading(progress);
            }
            if (progress < 100)
            {
                OnLoading(100);
            }
            // NOTE(review): assumes the comparing series is anchored at the first
            // timeline entry — confirm against callers.
            ComparingValues.DateTimes = new DateTime[] { TimeService.Timeline[0] };
            OnLoaded(_ProgressHandler);
        }
    }
}
/// <summary>
/// Loads one variable from a time-series text file into DataCube.
/// Header: 3 lines (the second carries the feature count); each data line has
/// 6 integer timestamp columns (y m d h m s) followed by the values.
/// </summary>
/// <param name="var_index">index of the variable slice to fill</param>
public override void LoadDataCube(int var_index)
{
    OnLoading(0);
    if (MaxTimeStep <= 0 || NumTimeStep == 0)
    {
        Scan();
        MaxTimeStep = NumTimeStep;
    }
    int nstep = StepsToLoad;
    int progress = 0;
    // 'using' disposes reader then stream in the correct order even on exception
    // (the original closed the stream before the reader and leaked both on throw).
    using (var fileStream = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var sr = new StreamReader(fileStream, Encoding.Default))
    {
        sr.ReadLine();
        string line = sr.ReadLine();
        sr.ReadLine();
        var strs = TypeConverterEx.Split<string>(line);
        // nfeat is not used below, but parsing it validates the header format
        // (throws on a malformed file, exactly as the original did).
        int nfeat = int.Parse(strs[1]);
        if (!DataCube.IsAllocated(var_index) || DataCube.Size[1] != nstep)
        {
            DataCube.Allocate(var_index);
        }
        DataCube.DateTimes = new DateTime[nstep];
        for (int t = 0; t < nstep; t++)
        {
            line = sr.ReadLine();
            // Columns after the 6 timestamp fields are the values for step t.
            var vv = TypeConverterEx.SkipSplit<float>(line, 6);
            DataCube[var_index, t.ToString(), ":"] = vv;
            progress = Convert.ToInt32(t * 100 / nstep);
            var temp = TypeConverterEx.Split<int>(line, 6);
            DataCube.DateTimes[t] = new DateTime(temp[0], temp[1], temp[2], temp[3], temp[4], temp[5]);
            // Throttle progress events to every 10%.
            if (progress % 10 == 0)
            {
                OnLoading(progress);
            }
        }
        OnLoading(100);
    }
    OnDataCubedLoaded(DataCube);
}
/// <summary>
/// Loads all variables of a time-series text file into a new DataCube.
/// Skips _skipped_line header lines; each data line has 6 integer timestamp
/// columns followed by one float per variable.
/// </summary>
/// <param name="cancelprogess">progress handler forwarded to OnLoaded</param>
/// <returns>true when loading completed</returns>
public override bool Load(ICancelProgressHandler cancelprogess)
{
    OnLoading(0);
    Scan();
    int nvar = Variables.Length;
    var mat = new DataCube<float>(nvar, StepsToLoad, 1);
    int progress = 0;
    mat.DateTimes = new DateTime[StepsToLoad];
    // 'using' guarantees reader/stream disposal even if parsing throws
    // (the original leaked both on exception).
    using (var fileStream = new FileStream(FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var sr = new StreamReader(fileStream, Encoding.Default))
    {
        string line = "";
        for (int i = 0; i < _skipped_line; i++)
        {
            line = sr.ReadLine();
        }
        for (int j = 0; j < nvar; j++)
        {
            mat.Allocate(j, StepsToLoad, 1);
        }
        for (int i = 0; i < StepsToLoad; i++)
        {
            line = sr.ReadLine();
            // First 6 columns: timestamp; remaining columns: one value per variable.
            var buf = TypeConverterEx.Split<int>(line, 6);
            mat.DateTimes[i] = new DateTime(buf[0], buf[1], buf[2], buf[3], buf[4], buf[5]);
            var buf1 = TypeConverterEx.SkipSplit<float>(line, 6);
            for (int j = 0; j < nvar; j++)
            {
                mat[j, i, 0] = buf1[j];
            }
            progress = Convert.ToInt32(i * 100 / StepsToLoad);
            OnLoading(progress);
        }
    }
    OnLoading(100);
    mat.Variables = Variables;
    mat.TimeBrowsable = true;
    DataCube = mat;
    OnLoaded(cancelprogess);
    return (true);
}
/// <summary>
/// Loads one variable from a binary multi-layer output file into DataCube,
/// combining layers per <see cref="LoadingBehavior"/>: Sum (scaled sum over
/// layers), Average (scaled sum / layer count) or None (only layer <see cref="Layer"/>).
/// Per-variable record: 2 ints, 16 name chars, 3 ints, then one
/// RowCount*ColumnCount block of singles per layer.
/// </summary>
/// <param name="var_index">index of the variable to load</param>
public override void LoadDataCube(int var_index)
{
    OnLoading(0);
    if (MaxTimeStep <= 0 || NumTimeStep == 0)
    {
        Scan();
        MaxTimeStep = NumTimeStep;
    }
    int nstep = StepsToLoad;
    float vv = 0;
    // Size in bytes of one layer's cell block.
    long layerbyte = _Grid.RowCount * _Grid.ColumnCount * 4;
    // Size in bytes of one variable's full record (header + all layers).
    long var_byte = 8 + 16 + 12 + layerbyte * _Grid.ActualLayerCount;
    int progress = 0;
    // BUG FIX: the original opened the file twice and never closed the first
    // FileStream/BinaryReader pair; a single 'using' pair removes the leak and
    // guarantees disposal even if reading throws.
    using (var fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var br = new BinaryReader(fs))
    {
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, _Grid.ActualLayerCount, true);
        }
        DataCube.Allocate(var_index);
        for (int t = 0; t < nstep; t++)
        {
            // Skip the variables stored before the requested one.
            for (int v = 0; v < var_index; v++)
            {
                fs.Seek(var_byte, SeekOrigin.Current);
            }
            // Record header: 2 ints, 16-char variable name, 3 ints.
            fs.Seek(4 * 2, SeekOrigin.Current);
            var vn = new string(br.ReadChars(16)).Trim();
            fs.Seek(4 * 3, SeekOrigin.Current);
            if (LoadingBehavior == MFLoadingLayersBehavior.Sum)
            {
                var buf = new float[_Grid.ActiveCellCount];
                for (int l = 0; l < _Grid.ActualLayerCount; l++)
                {
                    int index = 0;
                    for (int r = 0; r < _Grid.RowCount; r++)
                    {
                        for (int c = 0; c < _Grid.ColumnCount; c++)
                        {
                            vv = br.ReadSingle();
                            // NOTE(review): IBound is indexed with the 'Layer' property
                            // rather than the loop variable 'l' — confirm this is intended
                            // when summing over all layers.
                            if (_Grid.IBound[Layer, r, c] != 0)
                            {
                                buf[index] = buf[index] + vv * Scale;
                                index++;
                            }
                        }
                    }
                }
                DataCube.ILArrays[var_index][t, ":"] = buf;
            }
            else if (LoadingBehavior == MFLoadingLayersBehavior.Average)
            {
                var buf = new float[_Grid.ActiveCellCount];
                for (int l = 0; l < _Grid.ActualLayerCount; l++)
                {
                    int index = 0;
                    for (int r = 0; r < _Grid.RowCount; r++)
                    {
                        for (int c = 0; c < _Grid.ColumnCount; c++)
                        {
                            vv = br.ReadSingle();
                            // NOTE(review): same 'Layer' vs 'l' indexing question as above.
                            if (_Grid.IBound[Layer, r, c] != 0)
                            {
                                buf[index] = buf[index] + vv * Scale;
                                index++;
                            }
                        }
                    }
                }
                for (int i = 0; i < _Grid.ActiveCellCount; i++)
                {
                    buf[i] = buf[i] / _Grid.ActualLayerCount;
                }
                DataCube.ILArrays[var_index][t, ":"] = buf;
            }
            else if (LoadingBehavior == MFLoadingLayersBehavior.None)
            {
                for (int l = 0; l < _Grid.ActualLayerCount; l++)
                {
                    if (l == Layer)
                    {
                        int index = 0;
                        var buf = new float[_Grid.ActiveCellCount];
                        for (int r = 0; r < _Grid.RowCount; r++)
                        {
                            for (int c = 0; c < _Grid.ColumnCount; c++)
                            {
                                vv = br.ReadSingle();
                                if (_Grid.IBound[Layer, r, c] != 0)
                                {
                                    buf[index] = vv * Scale;
                                    index++;
                                }
                            }
                        }
                        DataCube.ILArrays[var_index][t, ":"] = buf;
                    }
                    else
                    {
                        // Skip layers other than the selected one.
                        fs.Seek(layerbyte, SeekOrigin.Current);
                    }
                }
            }
            // Skip the variables stored after the requested one.
            for (int v = var_index + 1; v < Variables.Length; v++)
            {
                fs.Seek(var_byte, SeekOrigin.Current);
            }
            progress = Convert.ToInt32(t * 100 / nstep);
            OnLoading(progress);
        }
        if (progress < 100)
        {
            OnLoading(100);
        }
    }
    OnDataCubedLoaded(DataCube);
}
/// <summary>
/// Loads one variable from a binary file into DataCube, remapping raw feature
/// values onto HRUs through <paramref name="mapping"/>.
/// Binary layout: variable count (int); per variable: name length (int),
/// name chars, feature count (int); then nstep blocks of feaNum singles.
/// </summary>
/// <param name="mapping">1-based HRU id -> feature index in the file</param>
/// <param name="var_index">index of the variable slice to fill</param>
public void LoadDataCubeSingle(Dictionary<int, int> mapping, int var_index)
{
    // Optional side-car descriptor supplies the timestamps.
    var xml = _FileName + ".xml";
    if (File.Exists(xml))
    {
        _Descriptor = DataCubeDescriptor.Deserialize(xml);
    }
    if (MaxTimeStep <= 0 || NumTimeStep == 0)
    {
        Scan();
        MaxTimeStep = NumTimeStep;
    }
    int feaNum = 0;
    int varnum = 0;
    int nstep = StepsToLoad;
    int nhru = mapping.Keys.Count;
    int progress = 0;
    FileStream fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    BinaryReader br = new BinaryReader(fs);
    try
    {
        // Read the variable-name header; feaNum keeps the feature count read
        // for the LAST variable (apparently all variables share it — TODO confirm).
        varnum = br.ReadInt32();
        Variables = new string[varnum];
        for (int i = 0; i < varnum; i++)
        {
            int varname_len = br.ReadInt32();
            Variables[i] = new string(br.ReadChars(varname_len)).Trim();
            feaNum = br.ReadInt32();
        }
        OnLoading(0);
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, nhru, true);
            DataCube.Variables = Variables;
        }
        if (!DataCube.IsAllocated(var_index) || DataCube.Size[1] != nstep)
        {
            DataCube.Allocate(var_index);
        }
        var vv = new float[feaNum];
        for (int t = 0; t < nstep; t++)
        {
            // One time step of raw feature values, scaled on read.
            for (int s = 0; s < feaNum; s++)
            {
                vv[s] = br.ReadSingle() * Scale;
            }
            // Remap file order to HRU order; mapping keys are 1-based.
            for (int i = 0; i < nhru; i++)
            {
                DataCube[var_index, t, i] = vv[mapping[i + 1]];
            }
            progress = Convert.ToInt32(t * 100 / nstep);
            OnLoading(progress);
        }
        br.Close();
        fs.Close();
        // NOTE(review): _Descriptor may be null when no .xml side-car exists —
        // confirm it is initialized elsewhere before this dereference.
        if (_Descriptor.TimeStamps != null)
        {
            DataCube.DateTimes = new DateTime[nstep];
            for (int t = 0; t < nstep; t++)
            {
                DataCube.DateTimes[t] = _Descriptor.TimeStamps[t];
            }
        }
        OnDataCubedLoaded(DataCube);
    }
    catch (Exception ex)
    {
        // Close before reporting so the file handle is released on failure.
        br.Close();
        fs.Close();
        OnLoadFailed("Failed to load. Error message: " + ex.Message);
    }
}
/// <summary>
/// Loads one variable from a binary file into DataCube.
/// Binary layout: variable count (int); per variable: name length (int),
/// name chars, feature count (int); then nstep blocks of varnum*feaNum singles
/// stored feature-major (per feature, one single per variable).
/// </summary>
/// <param name="var_index">index of the variable slice to fill</param>
public override void LoadDataCube(int var_index)
{
    // Optional side-car descriptor supplies the timestamps.
    var xml = _FileName + ".xml";
    if (File.Exists(xml))
    {
        _Descriptor = DataCubeDescriptor.Deserialize(xml);
    }
    if (MaxTimeStep <= 0 || NumTimeStep == 0)
    {
        Scan();
        MaxTimeStep = NumTimeStep;
    }
    int feaNum = 0;
    int varnum = 0;
    int nstep = StepsToLoad;
    int progress = 0;
    FileStream fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
    BinaryReader br = new BinaryReader(fs);
    try
    {
        // Moved the header reads inside the try (the original read varnum before
        // it): a truncated header is now reported through OnLoadFailed and the
        // reader/stream are closed, matching the sibling LoadDataCubeSingle.
        varnum = br.ReadInt32();
        Variables = new string[varnum];
        for (int i = 0; i < varnum; i++)
        {
            int varname_len = br.ReadInt32();
            Variables[i] = new string(br.ReadChars(varname_len)).Trim();
            // feaNum keeps the feature count read for the LAST variable.
            feaNum = br.ReadInt32();
        }
        OnLoading(0);
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, feaNum, true);
            DataCube.Variables = Variables;
        }
        if (!DataCube.IsAllocated(var_index) || DataCube.Size[1] != nstep)
        {
            DataCube.Allocate(var_index);
        }
        for (int t = 0; t < nstep; t++)
        {
            var buf = new float[feaNum];
            for (int s = 0; s < feaNum; s++)
            {
                // Skip the variables before var_index, read ours, skip the rest.
                br.ReadBytes(4 * var_index);
                buf[s] = br.ReadSingle() * Scale;
                br.ReadBytes(4 * (varnum - var_index - 1));
            }
            DataCube.ILArrays[var_index][t, ":"] = buf;
            progress = Convert.ToInt32(t * 100 / nstep);
            OnLoading(progress);
        }
        br.Close();
        fs.Close();
        // NOTE(review): _Descriptor may be null when no .xml side-car exists —
        // confirm it is initialized elsewhere before this dereference.
        if (_Descriptor.TimeStamps != null)
        {
            DataCube.DateTimes = new DateTime[nstep];
            for (int t = 0; t < nstep; t++)
            {
                DataCube.DateTimes[t] = _Descriptor.TimeStamps[t];
            }
        }
        OnDataCubedLoaded(DataCube);
    }
    catch (Exception ex)
    {
        br.Close();
        fs.Close();
        OnLoadFailed("Failed to load. Error message: " + ex.Message);
    }
}
/// <summary>
/// Loads one attached variable of the SFR output file into DataCube,
/// either per reach (IsLoadCompleteData) or per river/segment (last reach only).
/// File layout per time step: 8 header lines, then one data line per reach
/// whose columns after the first 5 are the attached variable values.
/// </summary>
/// <param name="var_index">index of the attached variable to load</param>
/// <param name="progresshandler">progress handler forwarded to loaded/failed events</param>
/// <returns>true on success; false when the file or the river network is missing</returns>
public override bool Load(int var_index, ICancelProgressHandler progresshandler)
{
    _ProgressHandler = progresshandler;
    _NumTimeStep = TimeService.GetIOTimeLength(ModelService.WorkDirectory);
    string filename = this.PackageInfo.FileName;
    if (UseSpecifiedFile)
    {
        filename = SpecifiedFileName;
    }
    if (File.Exists(filename))
    {
        var network = _SFRPackage.RiverNetwork;
        RiverNetwork = network;
        if (network == null)
        {
            return (false);
        }
        else
        {
            ReachIndex.Clear();
            int reachNum = network.ReachCount;
            int count = 1;
            int nstep = StepsToLoad;
            OnLoading(0);
            FileStream fs = new FileStream(filename, FileMode.Open, FileAccess.Read, FileShare.ReadWrite);
            StreamReader sr = new StreamReader(fs, System.Text.Encoding.Default);
            string line = "";
            int varLen = DefaultAttachedVariables.Length;
            int index = 0;
            int progress = 0;
            if (!IsLoadCompleteData)
            {
                // Segment mode: one value per river instead of per reach.
                reachNum = network.RiverCount;
            }
            if (IsReadSSData)
            {
                // NOTE(review): mutates the SkippedSteps property on every call —
                // repeated calls keep decrementing it; confirm intended.
                SkippedSteps = SkippedSteps - 1;
            }
            // Skip the leading steps: each skipped step is ReachCount data lines
            // plus 8 header lines.
            for (int t = 0; t < SkippedSteps * network.ReachCount + SkippedSteps * 8; t++)
            {
                if (!sr.EndOfStream)
                {
                    line = sr.ReadLine();
                }
            }
            // Build the (river, reach) -> flat index lookup.
            for (int i = 0; i < network.RiverCount; i++)
            {
                for (int j = 0; j < network.Rivers[i].Reaches.Count; j++)
                {
                    ReachIndex.Add(Tuple.Create(i, j, index));
                    index++;
                }
            }
            OnLoading(progress);
            try
            {
                DataCube = new DataCube<float>(varLen, nstep, reachNum, true)
                {
                    Name = "SFR_Output",
                    AllowTableEdit = false,
                    TimeBrowsable = true
                };
                DataCube.Allocate(var_index);
                DataCube.DateTimes = new DateTime[nstep];
            }
            catch (Exception)
            {
                // Allocation of the full cube failed; report and abort.
                Message = "Out of memory.";
                OnLoadFailed(Message, progresshandler);
                return (false);
            }
            for (int t = 0; t < nstep; t++)
            {
                // Consume the 8-line per-step header.
                for (int c = 0; c < 8; c++)
                {
                    sr.ReadLine();
                }
                int rch_index = 0;
                for (int i = 0; i < network.RiverCount; i++)
                {
                    if (IsLoadCompleteData)
                    {
                        // One value per reach.
                        for (int j = 0; j < network.Rivers[i].Reaches.Count; j++)
                        {
                            line = sr.ReadLine().Trim();
                            if (line != "")
                            {
                                var temp = TypeConverterEx.SkipSplit<float>(line, 5);
                                //Values.Value[var_index][t][rch_index] = temp[var_index];
                                DataCube.ILArrays[var_index].SetValue(temp[var_index], t, rch_index);
                            }
                            else
                            {
                                // Truncated file: log where it stopped and finish early.
                                Debug.WriteLine(String.Format("step:{0} seg:{1} reach:{2}", t, i + 1, j + 1));
                                goto finished;
                            }
                            rch_index++;
                        }
                    }
                    else
                    {
                        // Segment mode: skip to the last reach of the river and keep
                        // only its value.
                        for (int j = 0; j < network.Rivers[i].Reaches.Count - 1; j++)
                        {
                            line = sr.ReadLine().Trim();
                        }
                        line = sr.ReadLine().Trim();
                        var temp = TypeConverterEx.SkipSplit<float>(line, 5);
                        //Values.Value[var_index][t][i] = temp[var_index];
                        DataCube.ILArrays[var_index].SetValue(temp[var_index], t, i);
                    }
                }
                DataCube.DateTimes[t] = TimeService.Timeline[t];
                progress = t * 100 / nstep;
                // Report only when progress advances past the last reported value.
                if (progress > count)
                {
                    OnLoading(progress);
                    count++;
                }
            }
finished:
            {
                OnLoading(100);
            }
            sr.Close();
            fs.Close();
            if (IsLoadCompleteData)
            {
                DataCube.Topology = _SFRPackage.ReachTopology;
            }
            else
            {
                DataCube.Topology = _SFRPackage.SegTopology;
            }
            DataCube.Variables = DefaultAttachedVariables;
            Variables = DefaultAttachedVariables;
            OnLoaded(progresshandler);
            return (true);
        }
    }
    else
    {
        return (false);
    }
}
/// <summary>
/// load based on a mapping table
/// </summary>
/// <param name="mapping">[station_id,hru_id]; when empty, features are loaded in file order</param>
/// <param name="var_index">index of the variable slice to fill</param>
public void Load(Dictionary<int, int> mapping, int var_index)
{
    OnLoading(0);
    if (MaxTimeStep <= 0 || NumTimeStep == 0)
    {
        Scan();
        MaxTimeStep = NumTimeStep;
    }
    int nstep = StepsToLoad;
    int progress = 0;
    int nhru = mapping.Keys.Count;
    // Removed from the original: a large commented-out legacy method and a
    // no-op re-assignment of nstep (it was already StepsToLoad).
    // 'using' guarantees reader/stream disposal even if parsing throws
    // (the original leaked both on exception).
    using (var fileStream = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
    using (var sr = new StreamReader(fileStream, Encoding.Default))
    {
        // Header: line 1 skipped, line 2 carries the feature count, line 3 skipped.
        sr.ReadLine();
        string line = sr.ReadLine();
        sr.ReadLine();
        var strs = TypeConverterEx.Split<string>(line);
        int nfeat = int.Parse(strs[1]);
        if (!DataCube.IsAllocated(var_index) || DataCube.Size[1] != nstep)
        {
            DataCube.Allocate(var_index);
        }
        DataCube.DateTimes = new DateTime[nstep];
        if (nhru == 0)
        {
            // Empty mapping: take every feature in file order.
            nhru = nfeat;
            for (int t = 0; t < nstep; t++)
            {
                line = sr.ReadLine();
                // Columns after the 6 timestamp fields are the values.
                var vv = TypeConverterEx.SkipSplit<float>(line, 6);
                for (int i = 0; i < nhru; i++)
                {
                    DataCube[var_index, t, i] = vv[i];
                }
                var temp = TypeConverterEx.Split<int>(line, 6);
                DataCube.DateTimes[t] = new DateTime(temp[0], temp[1], temp[2], temp[3], temp[4], temp[5]);
                progress = Convert.ToInt32(t * 100 / nstep);
                OnLoading(progress);
            }
        }
        else
        {
            // Remap each feature value onto its HRU; mapping keys are 1-based.
            for (int t = 0; t < nstep; t++)
            {
                line = sr.ReadLine();
                var vv = TypeConverterEx.SkipSplit<float>(line, 6);
                for (int i = 0; i < nhru; i++)
                {
                    DataCube[var_index, t, i] = vv[mapping[i + 1]];
                }
                var temp = TypeConverterEx.Split<int>(line, 6);
                DataCube.DateTimes[t] = new DateTime(temp[0], temp[1], temp[2], temp[3], temp[4], temp[5]);
                progress = Convert.ToInt32(t * 100 / nstep);
                OnLoading(progress);
            }
        }
        OnLoading(100);
    }
    OnDataCubedLoaded(DataCube);
}
/// <summary>
/// Reads a formatted-text FHD file and stores, per time step, the water table
/// (maximum head over all layers) for every active cell.
/// </summary>
public void ReadTxtWaterTable()
{
    if (File.Exists(_FileName))
    {
        var grid = _Grid as MFGrid;
        int step;
        int sp;
        int col;
        int row;
        // First pass: read grid dimensions from the first header line.
        using (var headerReader = new StreamReader(_FileName))
        {
            string headline = headerReader.ReadLine();
            string[] strs = TypeConverterEx.Split<string>(headline);
            step = int.Parse(strs[0]);
            sp = int.Parse(strs[1]);
            col = int.Parse(strs[5]);
            row = int.Parse(strs[6]);
        }
        int nlayer = grid.ActualLayerCount;
        int stepIndex = 0;
        int nstep = StepsToLoad;
        // Values are wrapped at 10 per line in the text file.
        int colLine = (int)Math.Ceiling(col / 10.0);
        float[][] heads = new float[grid.ActualLayerCount][];
        List<float[]> headLst = new List<float[]>();
        for (int l = 0; l < grid.ActualLayerCount; l++)
        {
            heads[l] = new float[grid.ActiveCellCount];
        }
        // Second pass: read head arrays step by step.
        using (var srFhd = new StreamReader(_FileName))
        {
            while (!srFhd.EndOfStream)
            {
                float[] wt = new float[grid.ActiveCellCount];
                for (int l = 0; l < nlayer; l++)
                {
                    // Per-layer header repeats the step / stress period.
                    string headline = srFhd.ReadLine();
                    string[] strs = Regex.Split(headline.Trim(), @"[ ]+");
                    step = int.Parse(strs[0]);
                    sp = int.Parse(strs[1]);
                    int index = 0;
                    for (int r = 0; r < row; r++)
                    {
                        string line = "";
                        for (int i = 0; i < colLine; i++)
                        {
                            line += srFhd.ReadLine() + " ";
                        }
                        strs = TypeConverterEx.Split<string>(line);
                        for (int c = 0; c < strs.Length; c++)
                        {
                            float head = float.Parse(strs[c]);
                            if (grid.IBound[0, r, c] != 0)
                            {
                                heads[l][index] = head;
                                index++;
                            }
                        }
                    }
                }
                // Water table = max head across layers. The original recomputed
                // this inside the layer loop on every iteration; only the final
                // pass (after all layers were read) mattered, so hoisting it here
                // preserves the stored result while avoiding nlayer-1 wasted passes.
                float[] lwt = new float[grid.ActualLayerCount];
                for (int i = 0; i < grid.ActiveCellCount; i++)
                {
                    for (int ll = 0; ll < grid.ActualLayerCount; ll++)
                    {
                        lwt[ll] = heads[ll][i];
                    }
                    wt[i] = lwt.Max();
                }
                headLst.Add(wt);
                stepIndex++;
                if (stepIndex >= nstep)
                {
                    break;
                }
            }
        }
        if (DataCube == null)
        {
            // NOTE(review): the third dimension is ActualLayerCount while the
            // stored arrays are ActiveCellCount long — confirm the lazy DataCube
            // tolerates this, as the original did the same.
            DataCube = new DataCube<float>(Variables.Length, nstep, grid.ActualLayerCount, true);
        }
        DataCube.Allocate(0);
        for (int i = 0; i < headLst.Count; i++)
        {
            DataCube.ILArrays[0][i] = headLst[i];
        }
        heads = null;
        headLst.Clear();
        OnDataCubedLoaded(DataCube);
    }
    else
    {
        // Fixed typo in the original message ("dose" -> "does").
        OnLoadFailed("The FHD file does not exist: " + _FileName);
    }
}
/// <summary>
/// Reads a binary FHD file and stores, per time step and active cell, the water
/// depth (surface elevation minus the maximum head over all layers).
/// Per-layer record: 32 header bytes, 3 ints, then RowCount*ColumnCount singles.
/// </summary>
public void ReadBinWaterDepth()
{
    if (File.Exists(_FileName))
    {
        OnLoading(0);
        if (MaxTimeStep <= 0 || NumTimeStep == 0)
        {
            Scan();
            MaxTimeStep = NumTimeStep;
        }
        var grid = _Grid as MFGrid;
        int nstep = StepsToLoad;
        int progress = 0;
        float[][] heads = new float[grid.ActualLayerCount][];
        for (int l = 0; l < grid.ActualLayerCount; l++)
        {
            heads[l] = new float[grid.ActiveCellCount];
        }
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, grid.ActualLayerCount, true);
        }
        DataCube.Allocate(0);
        // 'using' guarantees reader/stream disposal even if reading throws
        // (the original leaked both on exception).
        using (var fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var br = new BinaryReader(fs))
        {
            float[] lwt = new float[grid.ActualLayerCount];
            for (int t = 0; t < nstep; t++)
            {
                for (int l = 0; l < grid.ActualLayerCount; l++)
                {
                    // Skip the 32-byte text header and three ints of the layer record.
                    fs.Seek(32, SeekOrigin.Current);
                    br.ReadInt32();
                    br.ReadInt32();
                    br.ReadInt32();
                    int index = 0;
                    for (int r = 0; r < grid.RowCount; r++)
                    {
                        for (int c = 0; c < grid.ColumnCount; c++)
                        {
                            float head = br.ReadSingle();
                            if (grid.IBound[0, r, c] != 0)
                            {
                                heads[l][index] = head;
                                index++;
                            }
                        }
                    }
                }
                var buf = new float[grid.ActiveCellCount];
                for (int i = 0; i < grid.ActiveCellCount; i++)
                {
                    for (int ll = 0; ll < grid.ActualLayerCount; ll++)
                    {
                        lwt[ll] = heads[ll][i];
                    }
                    // Depth = top elevation - water table (max head over layers).
                    buf[i] = grid.Elevations[0, 0, i] - lwt.Max();
                }
                DataCube.ILArrays[0][t, ":"] = buf;
                progress = Convert.ToInt32(t * 100 / nstep);
                OnLoading(progress);
            }
        }
        if (progress < 100)
        {
            OnLoading(100);
        }
        OnDataCubedLoaded(DataCube);
    }
    else
    {
        // Fixed typo in the original message ("dose" -> "does").
        OnLoadFailed("The FHD file does not exist: " + _FileName);
    }
}
/// <summary>
/// Reads a binary file and stores, per time step and active cell, the sum of
/// the per-layer values divided by the layer count.
/// Per-layer record: 32 header bytes, 3 ints, then RowCount*ColumnCount singles.
/// NOTE(review): despite "Total" in the name, the stored value is the layer
/// AVERAGE (total / ActualLayerCount) — confirm intended.
/// </summary>
public void ReadBinTotalVertDis()
{
    if (File.Exists(_FileName))
    {
        OnLoading(0);
        if (MaxTimeStep <= 0 || NumTimeStep == 0)
        {
            Scan();
            MaxTimeStep = NumTimeStep;
        }
        var grid = _Grid as MFGrid;
        int nstep = StepsToLoad;
        int progress = 0;
        float[][] vts = new float[grid.ActualLayerCount][];
        for (int l = 0; l < grid.ActualLayerCount; l++)
        {
            vts[l] = new float[grid.ActiveCellCount];
        }
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, grid.ActualLayerCount, true);
        }
        DataCube.Allocate(0);
        // 'using' guarantees reader/stream disposal even if reading throws
        // (the original leaked both on exception).
        using (var fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var br = new BinaryReader(fs))
        {
            float total_vt = 0;
            for (int t = 0; t < nstep; t++)
            {
                for (int l = 0; l < grid.ActualLayerCount; l++)
                {
                    // Skip the 32-byte text header and three ints of the layer record.
                    fs.Seek(32, SeekOrigin.Current);
                    br.ReadInt32();
                    br.ReadInt32();
                    br.ReadInt32();
                    int index = 0;
                    for (int r = 0; r < grid.RowCount; r++)
                    {
                        for (int c = 0; c < grid.ColumnCount; c++)
                        {
                            float head = br.ReadSingle();
                            if (grid.IBound[0, r, c] != 0)
                            {
                                vts[l][index] = head;
                                index++;
                            }
                        }
                    }
                }
                for (int i = 0; i < grid.ActiveCellCount; i++)
                {
                    total_vt = 0;
                    for (int ll = 0; ll < grid.ActualLayerCount; ll++)
                    {
                        total_vt += vts[ll][i];
                    }
                    DataCube[0, t, i] = total_vt / grid.ActualLayerCount;
                }
                progress = Convert.ToInt32(t * 100 / nstep);
                OnLoading(progress);
            }
        }
        if (progress < 100)
        {
            OnLoading(100);
        }
        OnDataCubedLoaded(DataCube);
    }
    else
    {
        // The original silently did nothing here; report the missing file like
        // the sibling readers do.
        OnLoadFailed("The FHD file does not exist: " + _FileName);
    }
}
/// <summary>
/// read layer head
/// Reads one layer's heads per time step from a binary FHD file into DataCube.
/// Per-layer record (KSTP,KPER,PERTIM,TOTIM,TEXT,NCOL,NROW,ILAY): 32 header
/// bytes, 3 ints, then RowCount*ColumnCount singles.
/// </summary>
/// <param name="layer">index starting from 1 (values below 1 are clamped to the first layer)</param>
/// <returns></returns>
public void ReadBinLayerHead(int layer)
{
    if (File.Exists(_FileName))
    {
        OnLoading(0);
        if (MaxTimeStep <= 0 || NumTimeStep == 0)
        {
            Scan();
            MaxTimeStep = NumTimeStep;
        }
        var grid = _Grid as MFGrid;
        // KSTP,KPER,PERTIM,TOTIM,TEXT,NCOL,NROW,ILAY
        long layerbyte = 32 + 4 * 3 + grid.RowCount * grid.ColumnCount * 4;
        int nstep = StepsToLoad;
        int progress = 0;
        // Convert to a 0-based layer index.
        layer = layer - 1;
        if (layer < 0)
        {
            layer = 0;
        }
        if (DataCube == null)
        {
            DataCube = new DataCube<float>(Variables.Length, nstep, grid.ActualLayerCount, true);
        }
        // NOTE(review): the cube slot is 'layer + 1' (slot 0 apparently reserved
        // for another series) — confirm against the class's slot layout.
        DataCube.Allocate(layer + 1);
        // 'using' guarantees reader/stream disposal even if reading throws
        // (the original leaked both on exception).
        using (var fs = new FileStream(_FileName, FileMode.Open, FileAccess.Read, FileShare.ReadWrite))
        using (var br = new BinaryReader(fs))
        {
            for (int t = 0; t < nstep; t++)
            {
                // Skip the layers stored before the requested one.
                for (int l = 0; l < layer; l++)
                {
                    fs.Seek(layerbyte, SeekOrigin.Current);
                }
                // Skip the 32-byte text header and three ints of the layer record.
                fs.Seek(32, SeekOrigin.Current);
                br.ReadInt32();
                br.ReadInt32();
                br.ReadInt32();
                int index = 0;
                var buf = new float[grid.ActiveCellCount];
                for (int r = 0; r < grid.RowCount; r++)
                {
                    for (int c = 0; c < grid.ColumnCount; c++)
                    {
                        float head = br.ReadSingle();
                        if (grid.IBound[0, r, c] != 0)
                        {
                            buf[index] = head;
                            index++;
                        }
                    }
                }
                // NOTE(review): the "" index differs from the ":" used by the
                // sibling readers — verify the ILArrays indexer treats them alike.
                DataCube.ILArrays[layer + 1][t, ""] = buf;
                // Skip the layers stored after the requested one.
                for (int l = layer + 1; l < grid.ActualLayerCount; l++)
                {
                    fs.Seek(layerbyte, SeekOrigin.Current);
                }
                progress = Convert.ToInt32(t * 100 / nstep);
                OnLoading(progress);
            }
        }
        if (progress < 100)
        {
            OnLoading(100);
        }
        OnDataCubedLoaded(DataCube);
    }
    else
    {
        // Fixed typo in the original message ("dose" -> "does").
        OnLoadFailed("The FHD file does not exist: " + _FileName);
    }
}