Example 1
        public static (long DatasetId, bool IsNew) OpenOrCreateDataset(long locationId, string datasetPath, long datasetTypeId, ulong chunkLength, ulong chunkCount, IntPtr fillValue = default)
        {
            return(IOHelper.OpenOrCreateDataset(locationId, datasetPath, datasetTypeId, () =>
            {
                long dcPropertyId = -1;
                long lcPropertyId = -1;
                long dataspaceId = -1;
                long datasetId = -1;

                try
                {
                    dcPropertyId = H5P.create(H5P.DATASET_CREATE);

                    if (fillValue != IntPtr.Zero)
                    {
                        H5P.set_fill_value(dcPropertyId, datasetTypeId, fillValue);
                    }

                    H5P.set_shuffle(dcPropertyId);
                    H5P.set_deflate(dcPropertyId, 7);
                    H5P.set_chunk(dcPropertyId, 1, new ulong[] { chunkLength });

                    lcPropertyId = H5P.create(H5P.LINK_CREATE);
                    H5P.set_create_intermediate_group(lcPropertyId, 1);

                    dataspaceId = H5S.create_simple(1, new ulong[] { chunkLength * chunkCount }, null);
                    datasetId = H5D.create(locationId, datasetPath, datasetTypeId, dataspaceId, lcPropertyId, dcPropertyId);

                    if (H5I.is_valid(datasetId) <= 0)
                    {
                        throw new Exception($"{ ErrorMessage.IOHelper_CouldNotOpenOrCreateDataset } Dataset: '{ datasetPath }'.");
                    }
                }
                finally
                {
                    if (H5I.is_valid(dcPropertyId) > 0)
                    {
                        H5P.close(dcPropertyId);
                    }
                    if (H5I.is_valid(lcPropertyId) > 0)
                    {
                        H5P.close(lcPropertyId);
                    }
                    if (H5I.is_valid(dataspaceId) > 0)
                    {
                        H5S.close(dataspaceId);
                    }
                }

                return datasetId;
            }));
        }
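A minimal usage sketch for the helper above, assuming HDF.PInvoke and that the method is reachable as IOHelper.OpenOrCreateDataset (the class name, file name, and dataset path are placeholders); the fill value is pinned so its address stays valid for the duration of the call:

        // Usage sketch (assumptions: IOHelper class name, placeholder file and dataset paths).
        long fileId = H5F.create("data.h5", H5F.ACC_TRUNC);
        var fill = new int[] { -1 };
        GCHandle hnd = GCHandle.Alloc(fill, GCHandleType.Pinned);

        try
        {
            var (datasetId, isNew) = IOHelper.OpenOrCreateDataset(
                fileId, "/group/signal", H5T.NATIVE_INT32,
                chunkLength: 1000, chunkCount: 10,
                fillValue: hnd.AddrOfPinnedObject());

            // ... write data via H5D.write, then close the dataset handle
            H5D.close(datasetId);
        }
        finally
        {
            hnd.Free();
            H5F.close(fileId);
        }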
Example 2
        public HDF5DataSet CreateDataset(string name, ulong[] shape, Type dType, long maxSize = 1,
                                         bool[] unlimited = null, ulong[] chunkShape = null, bool compress = false)
        {
            HDF5DataSet result = null;

            With((id) =>
            {
                int nDims = shape.Length;
                if (unlimited == null)
                {
                    unlimited = Enumerable.Range(0, nDims).Select(d => false).ToArray();
                }

                ulong[] maxShape =
                    Enumerable.Range(0, nDims).Select(d => unlimited[d] ? H5S.UNLIMITED : shape[d]).ToArray();
                var dataspaceID = H5S.create_simple(nDims, shape, maxShape);
                long dataTypeID = HDF5DataSet.OpenHDFDataType(dType, maxSize);

                long creationPropertyList = 0L;
                if (compress)
                {
                    if (chunkShape == null)
                    {
                        chunkShape = shape;
                    }

                    creationPropertyList = H5P.create(H5P.DATASET_CREATE);
                    H5P.set_layout(creationPropertyList, H5D.layout_t.CHUNKED);
                    H5P.set_deflate(creationPropertyList, 9);
                    H5P.set_chunk(creationPropertyList, shape.Length, chunkShape);
                }

                var newID = H5D.create(id, name, dataTypeID, dataspaceID, 0L, creationPropertyList, 0L);

                if (creationPropertyList > 0)
                {
                    H5P.close(creationPropertyList);
                }
                H5T.close(dataTypeID);
                H5S.close(dataspaceID);

                if (newID <= 0)
                {
                    throw new H5SSException("Couldn't create DataSet");
                }

                // close the raw handle; data is written later through the returned HDF5DataSet wrapper
                H5D.close(newID);
                result = new HDF5DataSet(name, this);
            });
            return(result);
        }
Example 3
        public static unsafe void AddChunkedDatasetForHyperslab(long fileId)
        {
            long res;

            var dcpl_id   = H5P.create(H5P.DATASET_CREATE);
            var dims      = new ulong[] { 25, 25, 4 };
            var chunkDims = new ulong[] { 7, 20, 3 };

            res = H5P.set_chunk(dcpl_id, 3, chunkDims);

            TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "hyperslab", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 4
        public static unsafe void AddFilteredDataset_ZLib(long fileId)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims    = new ulong[] { length, 4 };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 1000, 4 });
            res = H5P.set_filter(dcpl_id, H5Z.filter_t.DEFLATE, 0, new IntPtr(1), new uint[] { 5 } /* compression level */);

            TestUtils.Add(ContainerType.Dataset, fileId, "filtered", $"deflate", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 5
        public static unsafe void AddChunkedDataset_Implicit(long fileId)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims    = new ulong[] { length, 4 };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 1000, 3 });
            res = H5P.set_alloc_time(dcpl_id, H5D.alloc_time_t.EARLY);

            TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "chunked_implicit", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 6
        public void H5DOappendTestSWMR1()
        {
            hsize_t[] dims       = { 6, 0 };
            hsize_t[] maxdims    = { 6, H5S.UNLIMITED };
            hsize_t[] chunk_dims = { 2, 5 };
            int[]     cbuf       = new int [6];

            hid_t dsp = H5S.create_simple(2, dims, maxdims);

            Assert.IsTrue(dsp >= 0);

            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);

            Assert.IsTrue(dcpl >= 0);
            Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk_dims) >= 0);

            hsize_t[] boundary = { 0, 1 };

            hid_t dapl = H5P.create(H5P.DATASET_ACCESS);

            Assert.IsTrue(dapl >= 0);
            H5D.append_cb_t cb = DOappend_func;
            Assert.IsTrue(
                H5P.set_append_flush(dapl, 2, boundary, cb, new IntPtr(99)) >= 0);

            hid_t dst = H5D.create(m_v3_test_file_swmr, "dset",
                                   H5T.NATIVE_INT, dsp, H5P.DEFAULT, dcpl, dapl);

            Assert.IsTrue(dst >= 0);

            GCHandle hnd = GCHandle.Alloc(cbuf, GCHandleType.Pinned);

            for (int i = 0; i < 3; ++i)
            {
                for (int j = 0; j < 6; ++j)
                {
                    cbuf[j] = ((i * 6) + (j + 1)) * -1;
                }
                Assert.IsTrue(
                    H5DO.append(dst, H5P.DEFAULT, 1, new IntPtr(1),
                                H5T.NATIVE_INT, hnd.AddrOfPinnedObject()) >= 0);
            }

            hnd.Free();

            Assert.IsTrue(H5D.close(dst) >= 0);
            Assert.IsTrue(H5P.close(dapl) >= 0);
            Assert.IsTrue(H5P.close(dcpl) >= 0);
            Assert.IsTrue(H5S.close(dsp) >= 0);
        }
Example 7
        public void AppendOrCreateDataset(Array dataset)
        {
            if (_chunkDims == null)
            {
                if (dataset.Rank < 1)
                {
                    string msg = "Empty array was passed. Ignoring.";
                    Hdf5Utils.LogError?.Invoke(msg);
                    return;
                }

                for (int dimension = 1; dimension <= dataset.Rank; dimension++)
                {
                    var size = dataset.GetUpperBound(dimension - 1) + 1;
                    if (size == 0)
                    {
                        string msg = $"Empty array was passed for dimension {dimension}. Ignoring.";
                        Hdf5Utils.LogError?.Invoke(msg);
                        return;
                    }
                }
                _chunkDims = new[]
                {
                    Convert.ToUInt64(dataset.GetLongLength(0)),
                    Convert.ToUInt64(dataset.GetLongLength(1))
                };

                Rank         = dataset.Rank;
                _currentDims = GetDims(dataset);

                /* Create the data space with unlimited dimensions. */
                _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

                /* Modify dataset creation properties, i.e. enable chunking  */
                _propId = H5P.create(H5P.DATASET_CREATE);
                _status = H5P.set_chunk(_propId, Rank, _chunkDims);

                /* Create a new dataset within the file using chunk creation properties.  */
                _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname), _datatype, _spaceId, H5P.DEFAULT, _propId);

                /* Write data to dataset */
                GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);
                _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                                    hnd.AddrOfPinnedObject());
                hnd.Free();
                H5S.close(_spaceId);
                _spaceId = -1;
            }
            else
            {
                AppendDataset(dataset);
            }
        }
Example 8
        public static unsafe void AddFilteredDataset_Fletcher(long fileId)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims    = new ulong[] { length, 4 };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 1000, 4 });
            res = H5P.set_fletcher32(dcpl_id);

            TestUtils.Add(ContainerType.Dataset, fileId, "filtered", $"fletcher", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 9
        /// <summary>
        /// Create a new hdf5 `H5DataSet` at `loc_id`.
        /// </summary>
        /// <remarks>
        /// `maxdims` may be `null` in which case it is set to `dims`.
        /// </remarks>
        internal static H5DataSet Create(hid_t loc_id, string key, int rank, long[] dims, long[] maxdims, Type primitive_type)
        {
            hid_t dcpl;  // the 'dataset creation property list' controls chunking..

            if (maxdims == null || dims.SequenceEqual(maxdims))
            {
                dcpl = H5P.DEFAULT;
            }
            else if (HasH5Pcreate)
            {
                // ..which is needed for later resizing:
                var chunk = new ulong[rank];
                // the chunk is of size 1 in each 'unlimited' dimension and of size 'maxdims'
                // for all other dimensions (just like the 'SPECdata/Intensities' dataset):
                for (int i = 0; i < rank; i++)
                {
                    if (maxdims[i] == H5Space.Unlimited)
                    {
                        chunk[i] = 1UL;
                    }
                    else if (maxdims[i] > 0)
                    {
                        checked { chunk[i] = (ulong)maxdims[i]; }
                    }
                    else
                    {
                        throw new ArgumentException($"invalid value in parameter 'maxdims'");
                    }
                }
                dcpl = H5P.create(H5P.DATASET_CREATE);
                H5P.set_chunk(dcpl, rank, chunk);
            }
            else
            {
                maxdims = dims;
                dcpl    = H5P.DEFAULT;
            }
            hid_t id;

            using (H5Space space = H5Space.Create(rank, dims, maxdims))
                using (H5Type dtype = H5Type.Create(primitive_type))
                {
                    if ((id = H5D.create(loc_id, key, dtype.ID, space.ID, H5P.DEFAULT, dcpl, H5P.DEFAULT)) < 0)
                    {
                        throw new H5LibraryException($"H5D.create() returned ({id})");
                    }
                }
            return(FromID(id));
        }
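The chunking rule from the comment above (chunk length 1 along each unlimited dimension, the full extent elsewhere) can also be reproduced with raw HDF.PInvoke calls; a minimal sketch with placeholder file and dataset names:

        // Sketch: extendible 2-D dataset, chunked 1 x 512, starting with zero rows.
        long file   = H5F.create("chunks.h5", H5F.ACC_TRUNC);
        var dims    = new ulong[] { 0, 512 };
        var maxdims = new ulong[] { H5S.UNLIMITED, 512 };
        long space  = H5S.create_simple(2, dims, maxdims);

        long dcpl = H5P.create(H5P.DATASET_CREATE);
        H5P.set_chunk(dcpl, 2, new ulong[] { 1, 512 });   // size 1 in the unlimited dimension

        long dset = H5D.create(file, "Intensities", H5T.NATIVE_DOUBLE, space,
                               H5P.DEFAULT, dcpl, H5P.DEFAULT);

        H5D.close(dset);
        H5P.close(dcpl);
        H5S.close(space);
        H5F.close(file);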
Example 10
        public void H5Dget_num_chunksTest1()
        {
            hsize_t[] dims     = { 10, 10 };
            hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
            hid_t     space    = H5S.create_simple(2, dims, max_dims);

            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);

            Assert.IsTrue(dcpl >= 0);
            hsize_t[] chunk = { 4, 4 };
            Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);
            Assert.IsTrue(H5P.set_alloc_time(dcpl, H5D.alloc_time_t.EARLY) >= 0);
            Assert.IsTrue(H5P.set_fill_time(dcpl, H5D.fill_time_t.ALLOC) >= 0);

            hid_t dset = H5D.create(m_v0_test_file, "Early Bird", H5T.IEEE_F32BE,
                                    space, H5P.DEFAULT, dcpl);

            Assert.IsTrue(dset >= 0);

            // This should work but doesn't:
            // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);
            hid_t fspace = H5D.get_space(dset);

            Assert.IsTrue(fspace >= 0);
            Assert.IsTrue(H5S.select_all(fspace) >= 0);

            hsize_t nchunks = 0;

            Assert.IsTrue(H5D.get_num_chunks(dset, fspace, ref nchunks) >= 0);
            Assert.IsTrue(nchunks == 9);

            Assert.IsTrue(H5D.close(dset) >= 0);

            dset = H5D.create(m_v2_test_file, "Early Bird", H5T.IEEE_F32BE,
                              space, H5P.DEFAULT, dcpl);
            Assert.IsTrue(dset >= 0);

            // This should work but doesn't:
            // Assert.IsTrue(H5D.get_num_chunks(dset, H5S.ALL, ref nchunks) >= 0);
            fspace = H5D.get_space(dset);
            Assert.IsTrue(fspace >= 0);
            Assert.IsTrue(H5S.select_all(fspace) >= 0);

            nchunks = 0;
            Assert.IsTrue(H5D.get_num_chunks(dset, fspace, ref nchunks) >= 0);
            Assert.IsTrue(nchunks == 9);

            Assert.IsTrue(H5D.close(dset) >= 0);
        }
Example 11
        public void H5Pset_chunk_optsTestSWMR1()
        {
            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);

            Assert.IsTrue(dcpl >= 0);

            // without chunking, H5Pset_chunk_opts will throw an error
            hsize_t[] dims = { 4711 };
            Assert.IsTrue(H5P.set_chunk(dcpl, 1, dims) >= 0);

            uint opts = H5D.DONT_FILTER_PARTIAL_CHUNKS;

            Assert.IsTrue(H5P.set_chunk_opts(dcpl, opts) >= 0);

            Assert.IsTrue(H5P.close(dcpl) >= 0);
        }
Example 12
        public static void ClassInit(TestContext testContext)
        {
#if HDF5_VER1_10
            // create test files which persist across file tests
            m_v3_class_file = Utilities.H5TempFileSWMR(ref m_v3_class_file_name);
            Assert.IsTrue(m_v3_class_file >= 0);

            m_lcpl = H5P.create(H5P.LINK_CREATE);
            Assert.IsTrue(H5P.set_create_intermediate_group(m_lcpl, 1) >= 0);

            m_lcpl_utf8 = H5P.copy(m_lcpl);
            Assert.IsTrue(
                H5P.set_char_encoding(m_lcpl_utf8, H5T.cset_t.UTF8) >= 0);

            // create a sample dataset

            hsize_t[] dims       = { 6, 6 };
            hsize_t[] maxdims    = { 6, H5S.UNLIMITED };
            hsize_t[] chunk_dims = { 2, 5 };
            int[]     cbuf       = new int[36];

            hid_t dsp = H5S.create_simple(2, dims, maxdims);
            Assert.IsTrue(dsp >= 0);

            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);
            Assert.IsTrue(dcpl >= 0);
            Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk_dims) >= 0);

            hid_t dst = H5D.create(m_v3_class_file, "int6x6",
                                   H5T.NATIVE_INT, dsp, H5P.DEFAULT, dcpl);
            Assert.IsTrue(dst >= 0);

            GCHandle hnd = GCHandle.Alloc(cbuf, GCHandleType.Pinned);

            Assert.IsTrue(H5D.write(dst, H5T.NATIVE_INT, H5S.ALL, H5S.ALL,
                                    H5P.DEFAULT, hnd.AddrOfPinnedObject()) >= 0);

            hnd.Free();

            Assert.IsTrue(H5D.flush(dst) >= 0);

            Assert.IsTrue(H5D.close(dst) >= 0);
            Assert.IsTrue(H5P.close(dcpl) >= 0);
            Assert.IsTrue(H5S.close(dsp) >= 0);
#endif
        }
Example 13
        public static unsafe void AddChunkedDataset_Fixed_Array_Paged(long fileId, bool withShuffle)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims    = new ulong[] { length, 4 };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 1, 3 });

            if (withShuffle)
            {
                res = H5P.set_shuffle(dcpl_id);
            }

            TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "chunked_fixed_array_paged", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 14
        public static unsafe void AddChunkedDatasetWithFillValueAndAllocationLate(long fileId, int fillValue)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length;
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_alloc_time(dcpl_id, H5D.alloc_time_t.LATE);
            res = H5P.set_chunk(dcpl_id, 1, new ulong[] { 1000 });

            var handle = GCHandle.Alloc(BitConverter.GetBytes(fillValue), GCHandleType.Pinned);

            H5P.set_fill_value(dcpl_id, H5T.NATIVE_INT, handle.AddrOfPinnedObject());
            handle.Free();

            TestUtils.Add(ContainerType.Dataset, fileId, "fillvalue", $"{LayoutClass.Chunked}", H5T.NATIVE_INT32, (void *)0, length, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 15
        public void H5PglobalsTest1()
        {
            hsize_t[] dims     = { 10, 10, 10 };
            hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED, H5S.UNLIMITED };
            hid_t     space    = H5S.create_simple(3, dims, max_dims);

            hid_t lcpl = H5P.create(H5P.LINK_CREATE);

            Assert.IsTrue(H5P.set_create_intermediate_group(lcpl, 1) >= 0);

            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);

            Assert.IsTrue(dcpl >= 0);
            hsize_t[] chunk = { 64, 64, 64 };
            Assert.IsTrue(H5P.set_chunk(dcpl, 3, chunk) >= 0);
            Assert.IsTrue(H5P.set_deflate(dcpl, 9) >= 0);

            hid_t dset = H5D.create(m_v0_test_file, "A/B/C", H5T.IEEE_F32BE,
                                    space, lcpl, dcpl);

            Assert.IsTrue(dset >= 0);

            hid_t plist = H5D.get_create_plist(dset);

            Assert.IsTrue(H5D.layout_t.CHUNKED == H5P.get_layout(plist));
            Assert.IsTrue(H5P.close(plist) >= 0);

            Assert.IsTrue(H5D.close(dset) >= 0);

            dset = H5D.create(m_v2_test_file, "A/B/C", H5T.IEEE_F32BE,
                              space, lcpl, dcpl);
            Assert.IsTrue(dset >= 0);

            plist = H5D.get_create_plist(dset);
            Assert.IsTrue(H5D.layout_t.CHUNKED == H5P.get_layout(plist));
            Assert.IsTrue(H5P.close(plist) >= 0);

            Assert.IsTrue(H5D.close(dset) >= 0);

            Assert.IsTrue(H5P.close(dcpl) >= 0);
            Assert.IsTrue(H5P.close(lcpl) >= 0);
            Assert.IsTrue(H5S.close(space) >= 0);
        }
Example 16
        public static unsafe void AddChunkedDataset_Extensible_Array_Secondary_Blocks(long fileId, bool withShuffle)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims0   = new ulong[] { length, 4 };
            var dims1   = new ulong[] { H5S.UNLIMITED, 4 };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 3, 3 });

            if (withShuffle)
            {
                res = H5P.set_shuffle(dcpl_id);
            }

            TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "chunked_extensible_array_secondary_blocks", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims0, dims1, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 17
        public static unsafe void AddChunkedDataset_BTree2(long fileId, bool withShuffle)
        {
            long res;

            var length  = (ulong)TestData.MediumData.Length / 4;
            var dims0   = new ulong[] { length, 4 };
            var dims1   = new ulong[] { H5S.UNLIMITED, H5S.UNLIMITED };
            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 2, new ulong[] { 1000, 3 });

            if (withShuffle)
            {
                res = H5P.set_shuffle(dcpl_id);
            }

            TestUtils.Add(ContainerType.Dataset, fileId, "chunked", "chunked_btree2", H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), dims0, dims1, cpl: dcpl_id);

            res = H5P.close(dcpl_id);
        }
Example 18
        public static unsafe void AddFilteredDataset_Shuffle(long fileId, int bytesOfType, int length, Span<byte> dataset)
        {
            long res;

            var dcpl_id = H5P.create(H5P.DATASET_CREATE);

            res = H5P.set_chunk(dcpl_id, 1, new ulong[] { (ulong)length });
            res = H5P.set_shuffle(dcpl_id);

            var typeId = bytesOfType switch
            {
                1 => H5T.NATIVE_UINT8,
                2 => H5T.NATIVE_UINT16,
                4 => H5T.NATIVE_UINT32,
                8 => H5T.NATIVE_UINT64,
                _ => throw new Exception($"The value '{bytesOfType}' of the 'bytesOfType' parameter is not within the valid range.")
            };

            TestUtils.Add(ContainerType.Dataset, fileId, "filtered", $"shuffle_{bytesOfType}", typeId, dataset, (ulong)length, cpl: dcpl_id);
            res = H5P.close(dcpl_id);
        }
Example 19
        public void H5Dget_chunk_info_by_coordTest2()
        {
            hsize_t[] dims     = { 10, 10 };
            hsize_t[] max_dims = { H5S.UNLIMITED, H5S.UNLIMITED };
            hid_t     space    = H5S.create_simple(2, dims, max_dims);

            hid_t dcpl = H5P.create(H5P.DATASET_CREATE);

            Assert.IsTrue(dcpl >= 0);
            hsize_t[] chunk = { 4, 4 };
            Assert.IsTrue(H5P.set_chunk(dcpl, 2, chunk) >= 0);

            hid_t dset = H5D.create(m_v0_test_file, "Early Bird3", H5T.IEEE_F32BE,
                                    space, H5P.DEFAULT, dcpl);

            Assert.IsTrue(dset >= 0);

            hsize_t size = 100;

            hsize_t[] offset      = { 1, 2 };
            uint32_t  filter_mask = 0;
            haddr_t   addr        = 0;

            Assert.IsTrue(H5D.get_chunk_info_by_coord(dset, offset, ref filter_mask, ref addr, ref size) >= 0);
            Assert.IsTrue(filter_mask == 0 && size == 0 && addr == H5.HADDR_UNDEF);

            Assert.IsTrue(H5D.close(dset) >= 0);

            dset = H5D.create(m_v2_test_file, "Early Bird3", H5T.IEEE_F32BE,
                              space, H5P.DEFAULT, dcpl);
            Assert.IsTrue(dset >= 0);

            size = 100;
            addr = 0;
            Assert.IsTrue(H5D.get_chunk_info_by_coord(dset, offset, ref filter_mask, ref addr, ref size) >= 0);
            Assert.IsTrue(filter_mask == 0 && size == 0 && addr == H5.HADDR_UNDEF);

            Assert.IsTrue(H5D.close(dset) >= 0);
        }
Example 20
        public void FirstDataset(Array dataset)
        {
            if (GroupId <= 0)
            {
                throw new Hdf5Exception("cannot call FirstDataset because group or file couldn't be created");
            }

            if (Hdf5Utils.GetRealName(GroupId, Datasetname, string.Empty).valid)
            {
                throw new Hdf5Exception("cannot call FirstDataset because dataset already exists");
            }

            Rank         = dataset.Rank;
            _currentDims = GetDims(dataset);

            /* Create the data space with unlimited dimensions. */
            _spaceId = H5S.create_simple(Rank, _currentDims, _maxDims);

            /* Modify dataset creation properties, i.e. enable chunking  */
            _propId = H5P.create(H5P.DATASET_CREATE);
            _status = H5P.set_chunk(_propId, Rank, _chunkDims);

            /* Create a new dataset within the file using chunk creation properties.  */
            _datasetId = H5D.create(GroupId, Hdf5Utils.NormalizedName(Datasetname), _datatype, _spaceId, H5P.DEFAULT, _propId);

            /* Write data to dataset */
            GCHandle hnd = GCHandle.Alloc(dataset, GCHandleType.Pinned);

            _status = H5D.write(_datasetId, _datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                                hnd.AddrOfPinnedObject());
            if (_status < 0)
            {
                Hdf5Utils.LogError("Unable  to write dataset");
            }

            hnd.Free();
            H5S.close(_spaceId);
            _spaceId = -1;
        }
Example 21
        public static Hdf5Dataset CreateDataset(
            Hdf5Identifier _fileId,
            Hdf5Path _parentPath,
            string _name,
            Hdf5DataTypes _datatype,
            int _numberOfDimensions,
            List<Hdf5DimensionProperty> _properties,
            Hdf5CompressionProperty _compressionProperty = null)
        {
            Hdf5Path path = _parentPath.Append(_name);

            UInt64[] dimensionSize = new UInt64[_numberOfDimensions];
            UInt64[] maxSize       = null; // new UInt64[_numberOfDimensions];

            if (_numberOfDimensions != _properties.Count ||
                (_compressionProperty != null && _numberOfDimensions != _compressionProperty.ChunkDimensions.Length))
            {
                throw new Hdf5ArrayDimensionsMismatchException();
            }

            int i = 0;

            foreach (var property in _properties)
            {
                dimensionSize[i] = property.CurrentSize;

                if (_compressionProperty != null && _compressionProperty.ChunkDimensions[i] > property.CurrentSize)
                {
                    throw new Hdf5ArraySizeMismatchException();
                }

                i++;
            }

            Hdf5Identifier dataspaceId = H5S.create_simple(_numberOfDimensions, dimensionSize, maxSize).ToId();

            //TODO handle string datasets
            Hdf5Identifier typeId = H5T.copy(TypeHelper.GetNativeType(_datatype).Value).ToId();
            var            status = H5T.set_order(typeId.Value, H5T.order_t.LE);

            var plist_id = _compressionProperty != null ? H5P.create(H5P.DATASET_CREATE) : 0;

            if (plist_id != 0)
            {
                H5P.set_chunk(plist_id, _compressionProperty.ChunkDimensions.Length, _compressionProperty.ChunkDimensions);
                H5P.set_deflate(plist_id, _compressionProperty.CompressionLevel);
            }

            Hdf5Identifier datasetId = H5D.create(_fileId.Value, path.FullPath, typeId.Value, dataspaceId.Value, dcpl_id: plist_id).ToId();

            Hdf5Dataset dataset = null;

            if (datasetId.Value > 0)
            {
                dataset = new Hdf5Dataset(_fileId, datasetId, path.FullPath)
                {
                    DataType  = TypeHelper.GetDataTypeFromDataset(datasetId),
                    Dataspace = DataspaceHelper.GetDataspace(datasetId)
                };

                H5D.close(datasetId.Value);
            }

            H5T.close(typeId.Value);
            if (plist_id != 0)
            {
                H5P.close(plist_id);
            }

            FileHelper.FlushToFile(_fileId);

            return(dataset);
        }
Example 22
        public static hid_t CreateDataSet(hid_t hid, string dataSetName, hid_t typeId, ulong[] dims, ulong[] maxDims = null, ulong[] chunk = null, object fillValue = null)
        {
            if (string.IsNullOrEmpty(dataSetName))
            {
                throw new ArgumentException("dataSetName");
            }

            if (dims == null)
            {
                throw new ArgumentNullException("dims");
            }

            if (maxDims != null && maxDims.Length != dims.Length)
            {
                throw new ArgumentException("dims and maxDims should have the identical length.");
            }

            var chunked = false;

            if (maxDims != null)
            {
                for (int i = 0; i < dims.Length; ++i)
                {
                    if (maxDims[i] != H5S.UNLIMITED && maxDims[i] < dims[i])
                    {
                        throw new ArgumentException("maxDims[i] < dims[i]");
                    }

                    if (!chunked)
                    {
                        chunked = maxDims[i] == H5S.UNLIMITED;
                    }
                }
            }

            if (chunked)
            {
                if (chunk == null)
                {
                    throw new ArgumentNullException("chunk");
                }

                if (chunk.Length != dims.Length)
                {
                    throw new ArgumentException("chunk");
                }

                if (chunk.Any(size => size <= 0))
                {
                    throw new ArgumentException("All dimensions should have a positive length.");
                }
            }

            if (DataSetExists(hid, dataSetName))
            {
                throw new HDF5Exception("Data set \"{0}\" already exists.", dataSetName);
            }

            var space = H5S.create_simple(dims.Length, dims, maxDims);

            if (space < 0)
            {
                throw new HDF5Exception("H5S.create_simple failed.");
            }

            if (!chunked)
            {
                var dataSet = H5D.create(hid, dataSetName, typeId, space);

                H5S.close(space);

                if (dataSet < 0)
                {
                    throw new HDF5Exception("Failed to create data set \"{0}\".", dataSetName);
                }

                return(dataSet);
            }
            else
            {
                var createPropertyList = H5P.create(H5P.DATASET_CREATE);
                if (createPropertyList < 0)
                {
                    H5S.close(space);
                    throw new HDF5Exception("Failed to create property list for chunked data set \"{0}\".", dataSetName);
                }

                if (H5P.set_chunk(createPropertyList, chunk.Length, chunk) < 0)
                {
                    H5S.close(space);
                    throw new HDF5Exception("H5P.set_chunk failed.");
                }

                var dataSet = H5D.create(hid, dataSetName, typeId, space, H5P.DEFAULT, createPropertyList, H5P.DEFAULT);

                H5S.close(space);
                H5P.close(createPropertyList);

                if (dataSet < 0)
                {
                    throw new HDF5Exception("Failed to create chunked data set \"{0}\".", dataSetName);
                }

                return(dataSet);
            }
        }
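A minimal usage sketch for CreateDataSet above (the enclosing class is not shown, so the H5Helper name is an assumption; file and dataset names are placeholders):

        // Usage sketch: extendible 1-D data set with 4096-element chunks.
        long fileId = H5F.create("samples.h5", H5F.ACC_TRUNC);

        long dataSet = H5Helper.CreateDataSet(
            fileId, "samples", H5T.NATIVE_DOUBLE,
            dims:    new ulong[] { 0 },
            maxDims: new ulong[] { H5S.UNLIMITED },
            chunk:   new ulong[] { 4096 });

        H5D.close(dataSet);
        H5F.close(fileId);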
Example 23
        /// <summary>
        /// Appends a dataset to an HDF5 file. On the first call the dataset is created.
        /// </summary>
        /// <typeparam name="T">Generic parameter; only primitive types are allowed</typeparam>
        /// <param name="groupId">id of the group. Can also be a file Id</param>
        /// <param name="name">name of the dataset</param>
        /// <param name="dset">The dataset</param>
        /// <returns>status of the write method</returns>
        public static hid_t AppendDataset<T>(hid_t groupId, string name, Array dset, ulong chunkX = 200) where T : struct
        {
            var rank = dset.Rank;

            ulong[] dimsExtend = Enumerable.Range(0, rank)
                                           .Select(i => (ulong)dset.GetLength(i)).ToArray();
            ulong[] maxDimsExtend = new ulong[] { H5S.UNLIMITED }.Concat(dimsExtend.Skip(1)).ToArray(); // growable along dim 0
            ulong[] dimsChunk = new ulong[] { chunkX }.Concat(dimsExtend.Skip(1)).ToArray();
            ulong[] zeros = Enumerable.Range(0, rank).Select(z => (ulong)0).ToArray();
            hid_t   status, spaceId, datasetId;


            // name = ToHdf5Name(name);
            var datatype      = GetDatatype(typeof(T));
            var typeId        = H5T.copy(datatype);
            var datasetExists = H5L.exists(groupId, name) > 0;

            /* Create a new dataset within the file using chunk
             * creation properties.  */
            if (!datasetExists)
            {
                spaceId = H5S.create_simple(dset.Rank, dimsExtend, maxDimsExtend);

                var propId = H5P.create(H5P.DATASET_CREATE);
                status    = H5P.set_chunk(propId, rank, dimsChunk);
                datasetId = H5D.create(groupId, name, datatype, spaceId,
                                       H5P.DEFAULT, propId, H5P.DEFAULT);
                /* Write data to dataset */
                GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
                status = H5D.write(datasetId, datatype, H5S.ALL, H5S.ALL, H5P.DEFAULT,
                                   hnd.AddrOfPinnedObject());
                hnd.Free();
                H5P.close(propId);
            }
            else
            {
                datasetId = H5D.open(groupId, name);
                spaceId   = H5D.get_space(datasetId);
                var     rank_old = H5S.get_simple_extent_ndims(spaceId);
                ulong[] maxDims  = new ulong[rank_old];
                ulong[] dims     = new ulong[rank_old];
                H5S.get_simple_extent_dims(spaceId, dims, maxDims);

                /* Query the existing chunk layout from the dataset creation property list. */
                ulong[] oldChunk    = new ulong[rank_old];
                var     createPlist = H5D.get_create_plist(datasetId);
                status = H5P.get_chunk(createPlist, rank_old, oldChunk);
                H5P.close(createPlist);

                /* Extend the dataset. */
                var size = new ulong[] { dims[0] + dimsExtend[0] }.Concat(dims.Skip(1)).ToArray();
                status = H5D.set_extent(datasetId, size);

                /* Select a hyperslab in extended portion of dataset  */
                var filespaceId = H5D.get_space(datasetId);
                var offset = new ulong[] { dims[0] }.Concat(zeros.Skip(1)).ToArray();
                status = H5S.select_hyperslab(filespaceId, H5S.seloper_t.SET, offset, null,
                                              dimsExtend, null);

                /* Define memory space */
                var memId2 = H5S.create_simple(rank, dimsExtend, null);

                /* Write the data to the extended portion of dataset  */
                GCHandle hnd = GCHandle.Alloc(dset, GCHandleType.Pinned);
                status = H5D.write(datasetId, datatype, memId2, filespaceId,
                                   H5P.DEFAULT, hnd.AddrOfPinnedObject());
                hnd.Free();
                H5S.close(memId2);
                H5S.close(filespaceId);
            }

            H5D.close(datasetId);
            H5S.close(spaceId);
            return(status);
        }
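A minimal usage sketch for AppendDataset<T> above (the enclosing static class is not shown, so the Hdf5 name is an assumption; file and dataset names are placeholders); the first call creates the chunked data set, later calls extend it along the first dimension:

        // Usage sketch: each call appends 200 rows of 8 columns.
        long fileId = H5F.create("append.h5", H5F.ACC_TRUNC);
        var block = new int[200, 8];

        Hdf5.AppendDataset<int>(fileId, "rows", block);   // first call: creates the data set
        Hdf5.AppendDataset<int>(fileId, "rows", block);   // second call: extends it to 400 rows

        H5F.close(fileId);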