/// <summary>
/// Load a slice from the NetCDF file.
/// </summary>
/// <param name="slice">Carries variable to load, dimensions in file and what to load.</param>
/// <returns>The loaded scalar field; its invalid value is taken from the variable's "_FillValue" attribute.</returns>
public override ScalarField LoadFieldSlice(SliceRange slice)
{
    ScalarField field;
    Index offsets = new Index(slice.GetOffsets());
    NetCDF.ResultCode ncState = NetCDF.ResultCode.NC_NOERR;

    int[] sizeInFile = slice.GetLengths();
    // The field probably has fewer dimensions than the file; collect only the relevant ones.
    int[] sizeField = new int[offsets.Length];
    int numDimsField = 0;

    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        // A fixed offset with extent > 1: part of the field.
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Offset of -1 means: include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Query the dimension's full length from the file.
            int sizeDim;
            ncState = NetCDF.nc_inq_dimlen(_fileID, slice.GetDimensionID(dim), out sizeDim);
            Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR);
            sizeInFile[dim] = sizeDim;

            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;

            // Save size in size-vector for the scalar field.
            sizeField[numDimsField++] = sizeDim;
        }
    }

    // Generate size index for the field class.
    Index fieldSize = new Index(numDimsField);
    Array.Copy(sizeField, fieldSize.Data, numDimsField);

    // Change order of dimensions, so that the fastest dimension is at the end.
    for (int dim = 0; dim < fieldSize.Length / 2; ++dim)
    {
        int tmp = fieldSize[dim];
        fieldSize[dim] = fieldSize[fieldSize.Length - 1 - dim];
        fieldSize[fieldSize.Length - 1 - dim] = tmp;
    }

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize);

    // Create scalar field instance and fill it with data.
    field = new ScalarField(grid);
    int sliceSize = grid.Size.Product();

    // Get data. x64 dll fails in debug here...
    ncState = NetCDF.nc_get_vara_float(_fileID, (int)slice.GetVariable(), offsets.Data, sizeInFile, field.Data);
    Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR, ncState.ToString());

    // Read in the invalid ("fill") value.
    // FIX: check the result code like every other NetCDF call above; previously a
    // failed attribute read went unnoticed and silently installed 0.0f as invalid value.
    float[] invalidval = new float[1];
    ncState = NetCDF.nc_get_att_float(_fileID, (int)slice.GetVariable(), "_FillValue", invalidval);
    Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR, ncState.ToString());
    field.InvalidValue = invalidval[0];

    return field;
}
/// <summary>
/// Load a time series of slices from per-timestep raw files.
/// </summary>
/// <param name="slice">Carries variable to load, dimensions in file and what to load.</param>
/// <param name="starttime">Unused; must be -1. Specify the time range in <paramref name="slice"/> instead.</param>
/// <param name="timelength">Unused; must be -1. Specify the time range in <paramref name="slice"/> instead.</param>
/// <returns>An unsteady scalar field holding one ScalarField per (time, subtime) step.</returns>
public override ScalarFieldUnsteady LoadTimeSlices(SliceRange slice, int starttime = -1, int timelength = -1)
{
    Index offsets = new Index(slice.GetOffsets());
    int spaceDims = 4;
    int[] sizeInFile = slice.GetLengths();

    Debug.Assert(starttime == -1 && timelength == -1, "Ignoring those parameters. Please specify in the SliceRange instance!");

    // The field probably has fewer dimensions than the file; collect only the spatial ones.
    int[] sizeField = new int[spaceDims];
    int numDimsField = 0;

    // Exclude time dimension. It will be treated differently.
    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Offset of -1 means: include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Fill size from the known dimension lengths.
            sizeInFile[dim] = _dimLengths[dim];

            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;

            // Save size in size-vector for the scalar field.
            sizeField[numDimsField++] = sizeInFile[dim];
        }
    }

    // Count how many of the three spatial dimensions are non-degenerate.
    int numSpaceDims = ((sizeInFile[0] > 1) ? 1 : 0) + ((sizeInFile[1] > 1) ? 1 : 0) + ((sizeInFile[2] > 1) ? 1 : 0);
    Index fieldSize = new Index(numSpaceDims);
    Array.Copy(sizeField, fieldSize.Data, fieldSize.Length);
    Debug.Assert(sizeInFile[3] == 1, "How should I load several members into one data block???");

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize);

    // Create scalar field instances and fill them with data.
    int sliceSize = grid.Size.Product();

    // For each time and subtime step, run through them.
    ScalarField[] fields = new ScalarField[sizeInFile[4] * sizeInFile[5]];
    int indexTime = 0;
    for (int time = 0; time < sizeInFile[spaceDims]; ++time)
    {
        for (int subtime = 0; subtime < sizeInFile[spaceDims + 1]; ++subtime)
        {
            // Now, load one single file.
            string filename = RedSea.Singleton.GetFilename(offsets[spaceDims] + time, offsets[spaceDims + 1] + subtime, offsets[3], slice.GetVariable());
            using (FileStream fs = File.Open(@filename, FileMode.Open))
            {
                // Read in the data you need.
                using (BinaryReader reader = new BinaryReader(fs))
                {
                    // Read in all floats.
                    Debug.Assert(reader.BaseStream.Length >= sliceSize * sizeof(float));

                    fields[indexTime] = new ScalarField(grid);
                    int indexSpace = 0;

                    for (int z = offsets[2]; z < offsets[2] + sizeInFile[2]; ++z)
                    {
                        // Set file reader position to the right start point.
                        // FIX: Seek takes a BYTE offset, so the element index must be scaled by
                        // sizeof(float) — the relative seek below already does this, but the
                        // absolute seek here did not, reading from the wrong position whenever
                        // z, offsets[1] or offsets[0] was nonzero.
                        reader.BaseStream.Seek((z * _dimLengths[0] * _dimLengths[1] + offsets[1] * _dimLengths[0] + offsets[0]) * sizeof(float), SeekOrigin.Begin);

                        for (int y = offsets[1]; y < offsets[1] + sizeInFile[1]; ++y)
                        {
                            for (int x = offsets[0]; x < offsets[0] + sizeInFile[0]; ++x)
                            {
                                fields[indexTime][indexSpace++] = reader.ReadSingle();
                            }
                            // Advance one line (skip the part of the row outside the slice).
                            reader.BaseStream.Seek((_dimLengths[0] - sizeInFile[0]) * sizeof(float), SeekOrigin.Current);
                        }
                    }
                }
            }

            // Change Endian of data. Exact binary zeros mark missing data in the file;
            // map them to float.MaxValue as the invalid value.
            if (!Range.CorrectEndian)
            {
                fields[indexTime].ChangeEndian();

                for (int i = 0; i < fields[indexTime].Data.Length; ++i)
                {
                    if (fields[indexTime].Data[i] == 0)
                    {
                        fields[indexTime].Data[i] = float.MaxValue;
                    }
                }
                fields[indexTime].InvalidValue = float.MaxValue;
            }

            // Go on to next file.
            indexTime++;
        }
    }

    return new ScalarFieldUnsteady(fields, offsets[spaceDims] * _dimLengths[spaceDims + 1] + offsets[spaceDims + 1]);
}
/// <summary>
/// Load a slice from the NetCDF file.
/// </summary>
/// <param name="slice">Carries variable to load, dimensions in file and what to load.</param>
/// <returns>The loaded scalar field; its invalid value is taken from the variable's "_FillValue" attribute.</returns>
public override ScalarField LoadFieldSlice(SliceRange slice)
{
    ScalarField field;
    Index offsets = new Index(slice.GetOffsets());
    NetCDF.ResultCode ncState = NetCDF.ResultCode.NC_NOERR;

    int[] sizeInFile = slice.GetLengths();
    // The field probably has fewer dimensions than the file; collect only the relevant ones.
    int[] sizeField = new int[offsets.Length];
    int numDimsField = 0;

    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        // A fixed offset with extent > 1: part of the field.
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Offset of -1 means: include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Query the dimension's full length from the file.
            int sizeDim;
            ncState = NetCDF.nc_inq_dimlen(_fileID, slice.GetDimensionID(dim), out sizeDim);
            Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR);
            sizeInFile[dim] = sizeDim;

            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;

            // Save size in size-vector for the scalar field.
            sizeField[numDimsField++] = sizeDim;
        }
    }

    // Generate size index for the field class.
    Index fieldSize = new Index(numDimsField);
    Array.Copy(sizeField, fieldSize.Data, numDimsField);

    // Change order of dimensions, so that the fastest dimension is at the end.
    for (int dim = 0; dim < fieldSize.Length / 2; ++dim)
    {
        int tmp = fieldSize[dim];
        fieldSize[dim] = fieldSize[fieldSize.Length - 1 - dim];
        fieldSize[fieldSize.Length - 1 - dim] = tmp;
    }

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize);

    // Create scalar field instance and fill it with data.
    field = new ScalarField(grid);
    int sliceSize = grid.Size.Product();

    // Get data. x64 dll fails in debug here...
    ncState = NetCDF.nc_get_vara_float(_fileID, (int)slice.GetVariable(), offsets.Data, sizeInFile, field.Data);
    Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR, ncState.ToString());

    // Read in the invalid ("fill") value.
    // FIX: check the result code like every other NetCDF call above; previously a
    // failed attribute read went unnoticed and silently installed 0.0f as invalid value.
    float[] invalidval = new float[1];
    ncState = NetCDF.nc_get_att_float(_fileID, (int)slice.GetVariable(), "_FillValue", invalidval);
    Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR, ncState.ToString());
    field.InvalidValue = invalidval[0];

    return field;
}
/// <summary>
/// Load a time series of slices from per-timestep raw files.
/// </summary>
/// <param name="slice">Carries variable to load, dimensions in file and what to load.</param>
/// <param name="starttime">Unused; must be -1. Specify the time range in <paramref name="slice"/> instead.</param>
/// <param name="timelength">Unused; must be -1. Specify the time range in <paramref name="slice"/> instead.</param>
/// <returns>An unsteady scalar field holding one ScalarField per (time, subtime) step.</returns>
public override ScalarFieldUnsteady LoadTimeSlices(SliceRange slice, int starttime = -1, int timelength = -1)
{
    Index offsets = new Index(slice.GetOffsets());
    int spaceDims = 4;
    int[] sizeInFile = slice.GetLengths();

    Debug.Assert(starttime == -1 && timelength == -1, "Ignoring those parameters. Please specify in the SliceRange instance!");

    // The field probably has fewer dimensions than the file; collect only the spatial ones.
    int[] sizeField = new int[spaceDims];
    int numDimsField = 0;

    // Exclude time dimension. It will be treated differently.
    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Offset of -1 means: include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Fill size from the known dimension lengths.
            sizeInFile[dim] = _dimLengths[dim];

            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;

            // Save size in size-vector for the scalar field.
            sizeField[numDimsField++] = sizeInFile[dim];
        }
    }

    // Count how many of the three spatial dimensions are non-degenerate.
    int numSpaceDims = ((sizeInFile[0] > 1) ? 1 : 0) + ((sizeInFile[1] > 1) ? 1 : 0) + ((sizeInFile[2] > 1) ? 1 : 0);
    Index fieldSize = new Index(numSpaceDims);
    Array.Copy(sizeField, fieldSize.Data, fieldSize.Length);
    Debug.Assert(sizeInFile[3] == 1, "How should I load several members into one data block???");

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize);

    // Create scalar field instances and fill them with data.
    int sliceSize = grid.Size.Product();

    // For each time and subtime step, run through them.
    ScalarField[] fields = new ScalarField[sizeInFile[4] * sizeInFile[5]];
    int indexTime = 0;
    for (int time = 0; time < sizeInFile[spaceDims]; ++time)
    {
        for (int subtime = 0; subtime < sizeInFile[spaceDims + 1]; ++subtime)
        {
            // Now, load one single file.
            string filename = RedSea.Singleton.GetFilename(offsets[spaceDims] + time, offsets[spaceDims + 1] + subtime, offsets[3], slice.GetVariable());
            using (FileStream fs = File.Open(@filename, FileMode.Open))
            {
                // Read in the data you need.
                using (BinaryReader reader = new BinaryReader(fs))
                {
                    // Read in all floats.
                    Debug.Assert(reader.BaseStream.Length >= sliceSize * sizeof(float));

                    fields[indexTime] = new ScalarField(grid);
                    int indexSpace = 0;

                    for (int z = offsets[2]; z < offsets[2] + sizeInFile[2]; ++z)
                    {
                        // Set file reader position to the right start point.
                        // FIX: Seek takes a BYTE offset, so the element index must be scaled by
                        // sizeof(float) — the relative seek below already does this, but the
                        // absolute seek here did not, reading from the wrong position whenever
                        // z, offsets[1] or offsets[0] was nonzero.
                        reader.BaseStream.Seek((z * _dimLengths[0] * _dimLengths[1] + offsets[1] * _dimLengths[0] + offsets[0]) * sizeof(float), SeekOrigin.Begin);

                        for (int y = offsets[1]; y < offsets[1] + sizeInFile[1]; ++y)
                        {
                            for (int x = offsets[0]; x < offsets[0] + sizeInFile[0]; ++x)
                            {
                                fields[indexTime][indexSpace++] = reader.ReadSingle();
                            }
                            // Advance one line (skip the part of the row outside the slice).
                            reader.BaseStream.Seek((_dimLengths[0] - sizeInFile[0]) * sizeof(float), SeekOrigin.Current);
                        }
                    }
                }
            }

            // Change Endian of data. Exact binary zeros mark missing data in the file;
            // map them to float.MaxValue as the invalid value.
            if (!Range.CorrectEndian)
            {
                fields[indexTime].ChangeEndian();

                for (int i = 0; i < fields[indexTime].Data.Length; ++i)
                {
                    if (fields[indexTime].Data[i] == 0)
                    {
                        fields[indexTime].Data[i] = float.MaxValue;
                    }
                }
                fields[indexTime].InvalidValue = float.MaxValue;
            }

            // Go on to next file.
            indexTime++;
        }
    }

    return new ScalarFieldUnsteady(fields, offsets[spaceDims] * _dimLengths[spaceDims + 1] + offsets[spaceDims + 1]);
}