public void GetSlice()
{
    // insert
    var columnNames = new List<string>();
    for (int i = 0; i < 100; i++)
    {
        Keyspace.Insert("GetSlice", new ColumnPath("Standard2", null, "GetSlice." + i), "GetSlice.Value." + i);
        columnNames.Add("GetSlice." + i);
    }

    // get
    var columnParent = new ColumnParent("Standard2");
    var sliceRange = new SliceRange(false, 150);
    var slicePredicate = new SlicePredicate(null, sliceRange);
    var columns = Keyspace.GetSlice("GetSlice", columnParent, slicePredicate);

    Assert.NotNull(columns);
    Assert.Equal(100, columns.Count());

    var receivedColumnNames = columns.OrderBy(c => c.Name).Select(c => c.Name).ToList();
    Assert.NotEmpty(receivedColumnNames);
    Assert.Equal(columnNames.OrderBy(i => i).ToList(), receivedColumnNames);

    // clean up
    Keyspace.Remove("GetSlice", new ColumnPath("Standard2"));
}
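For reference, a minimal sketch of the other predicate mode this client supports: selecting columns by explicit name instead of by SliceRange. This assumes the first SlicePredicate constructor argument is a list of column names, mirroring the (null, sliceRange) call in the test above; the column names are hypothetical.

// Hedged sketch: fetch two specific columns instead of a contiguous slice.
// Assumes SlicePredicate(IList<string> columnNames, SliceRange sliceRange), as the test above suggests.
var namedPredicate = new SlicePredicate(new List<string> { "GetSlice.0", "GetSlice.1" }, null);
var namedColumns = Keyspace.GetSlice("GetSlice", new ColumnParent("Standard2"), namedPredicate);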
public void MultigetSlice()
{
    // insert
    var columnPath = new ColumnPath("Standard1", null, "MultigetSlice");
    var keys = new List<string>();
    for (int i = 0; i < 100; i++)
    {
        var key = "MultigetSlice." + i;
        Keyspace.Insert(key, columnPath, "MultigetSlice.value." + i);
        keys.Add(key);
    }

    // get
    var columnParent = new ColumnParent("Standard1");
    var sliceRange = new SliceRange(false, 150);
    var slicePredicate = new SlicePredicate(null, sliceRange);
    var ms = Keyspace.MultigetSlice(keys, columnParent, slicePredicate);
    for (int i = 0; i < 100; i++)
    {
        var columns = ms[keys[i]];
        Assert.NotNull(columns);
        Assert.Equal(1, columns.Count);
        Assert.True(columns.First().Value.StartsWith("MultigetSlice."));
    }

    // remove
    for (int i = 0; i < 100; i++)
    {
        Keyspace.Remove("MultigetSlice." + i, columnPath);
    }
}
public override ScalarFieldUnsteady LoadTimeSlices(SliceRange var, int starttime, int timelength)
{
    Close();
    var ret = LoadTimeSeries(var, starttime, timelength);
    Open();
    return ret;
}
/// <summary>
/// Create a Loader object and open a NetCDF file.
/// </summary>
/// <param name="file">Path of the file.</param>
//public LoaderRaw(string file)
//{
//    Debug.Assert(_numOpenFiles == 0, "Another file is still open!");
//    _fileName = file;
//    // nDims = [3];
//    // dimList = [
//    //     500, 1, 500,
//    //     500, 1, 500,
//    //     50, 1, 50
//    // ];
//    // dataprec = ['float32'];
//    // nrecords = [1];
//    // timeStepNumber = [108];
//    //string metadata = System.IO.File.ReadAllText(@file + ".meta");
//    //int index = metadata.IndexOf("nDims = [ ") + ("nDims = [ ").Length;
//    //if (index != -1)
//    //{
//    //    int index2 = this.Message.IndexOf(",", index);
//    //    if (index2 == -1)
//    //    {
//    //        index2 = this.Message.Length;
//    //    }
//    //}
//    _dimIDs = new int[] { (int)RedSea.Variable.GRID_X, (int)RedSea.Variable.GRID_Y, (int)RedSea.Variable.GRID_Z };
//    _dimLengths = new int[] { 500, 500, 50 };
//}

//protected int _step;
//protected int _substep;

public LoaderRaw(RedSea.Variable var = RedSea.Variable.VELOCITY_X)
{
    Debug.Assert(_numOpenFiles == 0, "Another file is still open!");
    _dimIDs = (int[])_dimensionIDs.Clone(); // new int[] { (int)RedSea.Dimension.GRID_X, (int)RedSea.Dimension.GRID_Y, (int)RedSea.Dimension.GRID_Z, (int)RedSea.Dimension.MEMBER, (int)RedSea.Dimension.TIME, (int)RedSea.Dimension.SUBTIME };
    _dimLengths = new int[] { 500, 500, 50, 50, RedSea.Singleton.NumSteps, RedSea.Singleton.NumSubsteps };
    Range = new SliceRangeRaw();
    Range.CorrectEndian = false;
}
//public SliceRange(LoaderRaw loader)
//{
//    Initialize(loader.GetIDs, var);
//}

public SliceRange(SliceRange range)
{
    _var = range._var;
    int numDims = range._presentDims.Length;
    _presentDims = new RedSea.Dimension[numDims];
    _dimOffsets = new int[numDims];
    _dimLengths = new int[numDims];
    Array.Copy(range._presentDims, _presentDims, numDims);
    Array.Copy(range._dimOffsets, _dimOffsets, numDims);
    Array.Copy(range._dimLengths, _dimLengths, numDims);
}
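A minimal usage sketch for the copy constructor above: it copies the dimension arrays, so the copy can be adjusted later without affecting the original range. The uRange variable is hypothetical.

// Hedged sketch: take an independent copy of an existing range before reusing it.
SliceRange uCopy = new SliceRange(uRange);   // offsets and lengths are copied, not shared
Debug.Assert(!ReferenceEquals(uCopy, uRange));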
internal static Apache.Cassandra.SliceRange ToCassandraSliceRange(this SliceRange sliceRange)
{
    if (sliceRange == null)
    {
        return null;
    }

    return new Apache.Cassandra.SliceRange
    {
        Count = sliceRange.Count,
        Finish = sliceRange.EndColumn ?? new byte[0],
        Reversed = sliceRange.Reversed,
        Start = sliceRange.StartColumn ?? new byte[0]
    };
}
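A sketch of how the extension above would typically be used when translating a client-side range into its Thrift counterpart. The object-initializer syntax assumes the managed SliceRange exposes settable StartColumn and EndColumn properties; only the getters are confirmed by the code above.

// Hedged sketch: build a managed SliceRange and convert it for the Thrift call.
var managedRange = new SliceRange(false, 150)
{
    StartColumn = null,   // null start/end fall back to empty byte arrays in the conversion
    EndColumn = null
};
Apache.Cassandra.SliceRange thriftRange = managedRange.ToCassandraSliceRange();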
public static ScalarFieldUnsteady LoadTimeSeries(SliceRange var, int starttime, int timelength)
{
    ScalarField[] slices = new ScalarField[timelength];

    LoaderNCF ncFile;
    for (int time = starttime; time < starttime + timelength; ++time)
    {
        ncFile = RedSea.Singleton.GetLoaderNCF(time); // path + (time + 1) + filename);
        // Index relative to starttime; slices only holds timelength entries.
        slices[time - starttime] = ncFile.LoadFieldSlice(var);
        ncFile.Close();
    }

    return new ScalarFieldUnsteady(slices, starttime);
}
public void GetSuperSlice()
{
    // insert
    for (int i = 0; i < 100; i++)
    {
        var cp = new ColumnPath("Super1", "SuperColumn.1", "GetSuperSlice_" + i);
        var cp2 = new ColumnPath("Super1", "SuperColumn.2", "GetSuperSlice_" + i);
        Keyspace.Insert("GetSuperSlice", cp, "GetSuperSlice_value_" + i);
        Keyspace.Insert("GetSuperSlice", cp2, "GetSuperSlice_value_" + i);
    }

    // get
    var columnParent = new ColumnParent("Super1");
    var sliceRange = new SliceRange(false, 150);
    var slicePredicate = new SlicePredicate(null, sliceRange);
    var columns = Keyspace.GetSuperSlice("GetSuperSlice", columnParent, slicePredicate);

    Assert.NotNull(columns);
    Assert.Equal(2, columns.Count());

    // clean up
    Keyspace.Remove("GetSuperSlice", new ColumnPath("Super1"));
}
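A minimal sketch of a reversed slice over the same super column family, assuming the first SliceRange constructor argument is the Reversed flag and the second is Count, as the conversion extension further up implies.

// Hedged sketch: fetch at most 10 super columns in reverse comparator order.
var reversedRange = new SliceRange(true, 10);
var reversedPredicate = new SlicePredicate(null, reversedRange);
var latest = Keyspace.GetSuperSlice("GetSuperSlice", new ColumnParent("Super1"), reversedPredicate);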
public override VectorFieldUnsteady LoadTimeVectorField(SliceRange[] vars, int starttime, int timelength)
{
    return LoadTimeSeries(vars, starttime, timelength);
}
/// <summary>
/// Load a slice from the file.
/// </summary>
/// <param name="slice">Carries the variable to load, the dimensions in the file and what to load.</param>
/// <returns></returns>
public override ScalarFieldUnsteady LoadTimeSlices(SliceRange slice, int starttime = -1, int timelength = -1)
{
    Index offsets = new Index(slice.GetOffsets());
    int spaceDims = 4;
    int[] sizeInFile = slice.GetLengths();
    Debug.Assert(starttime == -1 && timelength == -1, "Ignoring those parameters. Please specify them in the SliceRange instance!");

    // Probably has less dimensions.
    int[] sizeField = new int[spaceDims];
    int numDimsField = 0;

    // Exclude the time dimension. It will be treated differently.
    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Fill size.
            sizeInFile[dim] = _dimLengths[dim];
            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;
            // Save size in the size vector for the scalar field.
            sizeField[numDimsField++] = sizeInFile[dim];
        }
    }

    int numSpaceDims = ((sizeInFile[0] > 1) ? 1 : 0) + ((sizeInFile[1] > 1) ? 1 : 0) + ((sizeInFile[2] > 1) ? 1 : 0);
    Index fieldSize = new Index(numSpaceDims);
    Array.Copy(sizeField, fieldSize.Data, fieldSize.Length);
    Debug.Assert(sizeInFile[3] == 1, "How should I load several members into one data block???");

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize);

    // Create scalar field instances and fill them with data.
    int sliceSize = grid.Size.Product();

    // Run through each time and subtime step.
    ScalarField[] fields = new ScalarField[sizeInFile[4] * sizeInFile[5]];
    int indexTime = 0;
    for (int time = 0; time < sizeInFile[spaceDims]; ++time)
    {
        for (int subtime = 0; subtime < sizeInFile[spaceDims + 1]; ++subtime)
        {
            // Load one single file.
            string filename = RedSea.Singleton.GetFilename(offsets[spaceDims] + time, offsets[spaceDims + 1] + subtime, offsets[3], slice.GetVariable());
            using (FileStream fs = File.Open(filename, FileMode.Open))
            {
                // Read in the data we need.
                using (BinaryReader reader = new BinaryReader(fs))
                {
                    // Read in all floats.
                    Debug.Assert(reader.BaseStream.Length >= sliceSize * sizeof(float));

                    fields[indexTime] = new ScalarField(grid);
                    int indexSpace = 0;

                    for (int z = offsets[2]; z < offsets[2] + sizeInFile[2]; ++z)
                    {
                        // Set the reader position to the right start point.
                        reader.BaseStream.Seek(z * _dimLengths[0] * _dimLengths[1] + offsets[1] * _dimLengths[0] + offsets[0], SeekOrigin.Begin);
                        for (int y = offsets[1]; y < offsets[1] + sizeInFile[1]; ++y)
                        {
                            for (int x = offsets[0]; x < offsets[0] + sizeInFile[0]; ++x)
                            {
                                fields[indexTime][indexSpace++] = reader.ReadSingle();
                            }
                            // Advance one line.
                            reader.BaseStream.Seek((_dimLengths[0] - sizeInFile[0]) * sizeof(float), SeekOrigin.Current);
                        }
                    }
                }
            }

            // Change endianness of the data.
            if (!Range.CorrectEndian)
            {
                fields[indexTime].ChangeEndian();
                for (int i = 0; i < fields[indexTime].Data.Length; ++i)
                {
                    if (fields[indexTime].Data[i] == 0)
                        fields[indexTime].Data[i] = float.MaxValue;
                }
                fields[indexTime].InvalidValue = float.MaxValue;
            }

            // Go on to the next file.
            indexTime++;
        }
    }

    return new ScalarFieldUnsteady(fields, offsets[spaceDims] * _dimLengths[spaceDims + 1] + offsets[spaceDims + 1]);
}
/// <summary>
/// Load a slice from the file.
/// </summary>
/// <param name="slice">Carries the variable to load, the dimensions in the file and what to load.</param>
/// <returns></returns>
public override ScalarField LoadFieldSlice(SliceRange slice)
{
    ScalarField field;
    Index offsets = new Index(slice.GetOffsets());
    NetCDF.ResultCode ncState = NetCDF.ResultCode.NC_NOERR;

    //int[] sizeInFile = new int[offsets.Length];
    int[] sizeInFile = slice.GetLengths();

    // Probably has less dimensions.
    int[] sizeField = new int[offsets.Length];
    int numDimsField = 0;
    //int currDimSlice = 0;
    for (int dim = 0; dim < offsets.Length; ++dim)
    {
        if (offsets[dim] != -1 && sizeInFile[dim] > 1)
        {
            sizeField[numDimsField++] = sizeInFile[dim];
        }
        // Include the whole dimension.
        else if (offsets[dim] == -1)
        {
            // Fill size.
            int sizeDim;
            ncState = NetCDF.nc_inq_dimlen(_fileID, slice.GetDimensionID(dim), out sizeDim);
            Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR);
            sizeInFile[dim] = sizeDim;

            // Set offset to zero: offset = 0, size = size of dimension.
            offsets[dim] = 0;

            // Save size in the size vector for the scalar field.
            sizeField[numDimsField++] = sizeDim;
        }
    }

    //if (slice.IsTimeDependent())
    //    numDimsField++;

    // Generate the size index for the field class.
    Index fieldSize = new Index(numDimsField);
    Array.Copy(sizeField, fieldSize.Data, numDimsField);

    // When the field has several time slices, add a time dimension.
    //if (slice.IsTimeDependent())
    //    fieldSize[numDimsField - 1] = slice.GetNumTimeSlices();

    // Change the order of dimensions, so that the fastest dimension is at the end.
    for (int dim = 0; dim < fieldSize.Length / 2; ++dim)
    {
        int tmp = fieldSize[dim];
        fieldSize[dim] = fieldSize[fieldSize.Length - 1 - dim];
        fieldSize[fieldSize.Length - 1 - dim] = tmp;
    }

    // Create a grid descriptor for the field.
    // TODO: Actually load this data.
    RectlinearGrid grid = new RectlinearGrid(fieldSize); //, new Vector(0.0f, fieldSize.Length), new Vector(0.1f, fieldSize.Length));

    // Create the scalar field instance and fill it with data.
    field = new ScalarField(grid);
    int sliceSize = grid.Size.Product(); // / slice.GetNumTimeSlices();

    // Get data. The x64 dll fails in debug here...
    ncState = NetCDF.nc_get_vara_float(_fileID, (int)slice.GetVariable(), offsets.Data, sizeInFile, field.Data);
    Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR, ncState.ToString());

    // Read in the invalid value.
    float[] invalidval = new float[1];
    ncState = NetCDF.nc_get_att_float(_fileID, (int)slice.GetVariable(), "_FillValue", invalidval);
    field.InvalidValue = invalidval[0];

    return field;
}
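A usage sketch for LoadFieldSlice, following the pattern used by LoadTimeSeries below: obtain a loader for one time step, build a range for a variable and read a single slice. The SliceRange(LoaderNCF, RedSea.Variable) constructor is only shown commented out further down, so treat it as an assumption.

// Hedged sketch: read one scalar slice for a single time step.
LoaderNCF ncFile = RedSea.Singleton.GetLoaderNCF(0);
SliceRange uRange = new SliceRange(ncFile, RedSea.Variable.VELOCITY_X);   // assumed constructor, see commented-out code below
ScalarField u = ncFile.LoadFieldSlice(uRange);
ncFile.Close();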
/// <summary>
/// Class to define a slice to be loaded. Dimensions may be either included completely, or only one value is taken.
/// </summary>
//public partial class SliceRange
//{
//    public SliceRange(LoaderNCF file, RedSea.Variable var) : base()
//    {
//        // Query number of dimensions of variable.
//        int numDims;
//        NetCDF.ResultCode ncState = NetCDF.nc_inq_varndims(file.GetID(), (int)var, out numDims);
//        Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR);
//        int[] dimIDs = new int[numDims];
//
//        // Query relevant dimensions.
//        ncState = NetCDF.nc_inq_vardimid(file.GetID(), (int)var, dimIDs);
//        Debug.Assert(ncState == NetCDF.ResultCode.NC_NOERR);
//
//        Initialize(dimIDs, var);
//    }
//}

//public delegate string FilenameFromIndex(int index);

public static VectorFieldUnsteady LoadTimeSeries(SliceRange[] vars, int starttime, int timelength)
{
    ScalarField[][] slices = new ScalarField[vars.Length][];
    for (int var = 0; var < vars.Length; ++var)
        slices[var] = new ScalarField[timelength];

    LoaderNCF ncFile;
    for (int time = 0; time < timelength; ++time)
    {
        ncFile = RedSea.Singleton.GetLoaderNCF(time + starttime); // path + (time + 1) + filename);
        for (int var = 0; var < vars.Length; ++var)
        {
            slices[var][time] = ncFile.LoadFieldSlice(vars[var]);
        }
        ncFile.Close();
    }

    ScalarFieldUnsteady[] scalars = new ScalarFieldUnsteady[vars.Length];
    for (int var = 0; var < vars.Length; ++var)
        scalars[var] = new ScalarFieldUnsteady(slices[var], starttime);

    return new VectorFieldUnsteady(scalars);
}
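A sketch of assembling one SliceRange per velocity component and loading them as a single unsteady vector field with the method above. VELOCITY_X appears elsewhere in this code; VELOCITY_Y and the SliceRange constructor taking a loader and a variable (commented out above) are assumptions.

// Hedged sketch: load 10 time steps of (u, v) as one VectorFieldUnsteady.
LoaderNCF header = RedSea.Singleton.GetLoaderNCF(0);
SliceRange[] uv = new SliceRange[]
{
    new SliceRange(header, RedSea.Variable.VELOCITY_X),
    new SliceRange(header, RedSea.Variable.VELOCITY_Y)   // hypothetical enum member
};
header.Close();
VectorFieldUnsteady velocity = LoadTimeSeries(uv, starttime: 0, timelength: 10);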
public abstract ScalarFieldUnsteady LoadTimeSlices(SliceRange slices, int starttime, int timelength);
public abstract VectorFieldUnsteady LoadTimeVectorField(SliceRange[] slices, int starttime, int timelength);
public static VectorFieldUnsteady LoadTimeSeries(SliceRange[] slices)
{
    ScalarFieldUnsteady[] fields = new ScalarFieldUnsteady[slices.Length];
    LoaderRaw loader = new LoaderRaw(RedSea.Variable.VELOCITY_X);
    for (int i = 0; i < slices.Length; ++i)
        fields[i] = loader.LoadTimeSlices(slices[i]);

    return new VectorFieldUnsteady(fields);
}
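A corresponding sketch for the raw-file path above. SliceRangeRaw construction appears in the LoaderRaw constructor, but how its variable, member and time window are selected is not shown here, so assume the ranges were configured accordingly before the call.

// Hedged sketch: load two preconfigured raw ranges (e.g. one per velocity component)
// as a single unsteady vector field. uRange and vRange are hypothetical, preconfigured
// SliceRangeRaw instances.
SliceRange[] rawRanges = new SliceRange[] { uRange, vRange };
VectorFieldUnsteady rawVelocity = LoadTimeSeries(rawRanges);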
public abstract ScalarField LoadFieldSlice(SliceRange slice);
public override VectorFieldUnsteady LoadTimeVectorField(SliceRange[] slices, int starttime, int timelength)
{
    ScalarFieldUnsteady[] fields = new ScalarFieldUnsteady[slices.Length];
    for (int i = 0; i < slices.Length; ++i)
        fields[i] = LoadTimeSlices(slices[i]);

    return new VectorFieldUnsteady(fields);
}