public static unsafe void AddExternalDataset(long fileId, string datasetName, string absolutePrefix, H5DatasetAccess datasetAccess)
{
    long res;

    var bytesoftype = 4;
    var dcpl_id = H5P.create(H5P.DATASET_CREATE);
    var dapl_id = H5P.create(H5P.DATASET_ACCESS);

    res = H5P.set_layout(dcpl_id, H5D.layout_t.CONTIGUOUS);

    // a (more than one chunk in file)
    var pathA = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_a.raw"), datasetAccess);

    if (File.Exists(pathA))
        File.Delete(pathA);

    res = H5P.set_external(dcpl_id, pathA, new IntPtr(120), (ulong)(10 * bytesoftype));
    res = H5P.set_external(dcpl_id, pathA, new IntPtr(80), (ulong)(10 * bytesoftype));
    res = H5P.set_external(dcpl_id, pathA, new IntPtr(0), (ulong)(10 * bytesoftype));

    // b (file size smaller than set size)
    var pathB = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_b.raw"), datasetAccess);

    if (File.Exists(pathB))
        File.Delete(pathB);

    res = H5P.set_external(dcpl_id, pathB, new IntPtr(0), (ulong)(10 * bytesoftype));

    // c (normal file)
    var pathC = H5Utils.ConstructExternalFilePath(Path.Combine(absolutePrefix, $"{datasetName}_c.raw"), datasetAccess);

    if (File.Exists(pathC))
        File.Delete(pathC);

    res = H5P.set_external(dcpl_id, pathC, new IntPtr(0), (ulong)((TestData.MediumData.Length - 40) * bytesoftype));

    // write data
    if (datasetAccess.ExternalFilePrefix is not null)
        H5P.set_efile_prefix(dapl_id, datasetAccess.ExternalFilePrefix);

    TestUtils.Add(ContainerType.Dataset, fileId, "external", datasetName, H5T.NATIVE_INT32, TestData.MediumData.AsSpan(), apl: dapl_id, cpl: dcpl_id);

    // truncate file b
    using (var fileStream2 = File.OpenWrite(pathB))
    {
        fileStream2.SetLength(10);
    }

    res = H5P.close(dapl_id);
    res = H5P.close(dcpl_id);
}
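// A minimal usage sketch for the method above, assuming the HDF.PInvoke H5F API
// and an H5DatasetAccess type with a settable ExternalFilePrefix property as used
// in this test project; everything outside this file is an assumption, not a
// guaranteed API.
public static void AddExternalDatasetUsageSketch()
{
    var fileId = H5F.create("external.h5", H5F.ACC_TRUNC);

    try
    {
        // hypothetical initialization; the real H5DatasetAccess may differ
        var datasetAccess = new H5DatasetAccess() { ExternalFilePrefix = null };
        AddExternalDataset(fileId, "external_dataset", Path.GetTempPath(), datasetAccess);
    }
    finally
    {
        _ = H5F.close(fileId);
    }
}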
public OpaquePropertyDescription(H5BinaryReader reader, byte tagByteLength) : base(reader)
{
    this.Tag = H5Utils
        .ReadFixedLengthString(reader, tagByteLength)
        .TrimEnd('\0');

#warning How to avoid the appended '\0'? Is it caused by a C# string being passed to the HDF5 lib?
}
public SoftLinkInfo(H5BinaryReader reader) : base(reader)
{
    // value length
    this.ValueLength = reader.ReadUInt16();

    // value
    this.Value = H5Utils.ReadFixedLengthString(reader, this.ValueLength);
}
public OldObjectModificationTimeMessage(H5BinaryReader reader) : base(reader)
{
    // date / time
    this.Year = H5Utils.ReadFixedLengthString(reader, 4);
    this.Month = H5Utils.ReadFixedLengthString(reader, 2);
    this.DayOfMonth = H5Utils.ReadFixedLengthString(reader, 2);
    this.Hour = H5Utils.ReadFixedLengthString(reader, 2);
    this.Minute = H5Utils.ReadFixedLengthString(reader, 2);
    this.Second = H5Utils.ReadFixedLengthString(reader, 2);

    // reserved
    reader.ReadBytes(2);
}
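// Hedged sketch: a hypothetical convenience method (not part of the original
// message class) that combines the fixed-width fields parsed above into a
// DateTime. It assumes the fields hold zero-padded ASCII decimal digits, as
// the HDF5 file format prescribes for this message.
public DateTime ToDateTime()
{
    return new DateTime(
        year: int.Parse(this.Year),
        month: int.Parse(this.Month),
        day: int.Parse(this.DayOfMonth),
        hour: int.Parse(this.Hour),
        minute: int.Parse(this.Minute),
        second: int.Parse(this.Second));
}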
public VdsDatasetEntry(H5BinaryReader reader) : base(reader)
{
#warning Is reading a null-terminated string correct here?

    // source file name
    this.SourceFileName = H5Utils.ReadNullTerminatedString(reader, pad: true);

    // source dataset
    this.SourceDataset = H5Utils.ReadNullTerminatedString(reader, pad: true);

    // source selection
    this.SourceSelection = new DataspaceSelection(reader);

    // virtual selection
    this.VirtualSelection = new DataspaceSelection(reader);
}
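// Hedged sketch of the reading behavior assumed above: ReadNullTerminatedString
// with pad: true is expected to consume bytes up to and including the '\0'
// terminator and then skip padding so that the total number of consumed bytes
// is a multiple of 8. This illustrates that assumption; it is not the actual
// H5Utils implementation, and it assumes H5BinaryReader exposes
// BinaryReader-style ReadByte/ReadBytes methods.
private static string ReadNullTerminatedPaddedStringSketch(H5BinaryReader reader)
{
    var bytes = new List<byte>();
    byte value;

    while ((value = reader.ReadByte()) != 0)
    {
        bytes.Add(value);
    }

    // skip pad bytes so that (string + terminator + padding) is a multiple of 8
    var consumed = bytes.Count + 1;
    var remainder = consumed % 8;

    if (remainder != 0)
        reader.ReadBytes(8 - remainder);

    return Encoding.ASCII.GetString(bytes.ToArray());
}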
public BTree2Record11(H5BinaryReader reader, Superblock superblock, byte rank, uint chunkSizeLength)
{
    // address
    this.Address = superblock.ReadOffset(reader);

    // chunk size
    this.ChunkSize = H5Utils.ReadUlong(reader, chunkSizeLength);

    // filter mask
    this.FilterMask = reader.ReadUInt32();

    // scaled offsets
    this.ScaledOffsets = new ulong[rank];

    for (int i = 0; i < rank; i++)
    {
        this.ScaledOffsets[i] = reader.ReadUInt64();
    }
}
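// Hedged sketch: a hypothetical helper (not part of the original record class)
// that linearizes the scaled chunk offsets read above into a row-major chunk
// index, given the scaled dataset dimensions (ceil(datasetDim / chunkDim) per
// axis, as computed for example in the Walk method below).
public ulong GetLinearChunkIndexSketch(ulong[] scaledDatasetDims)
{
    ulong linearIndex = 0;

    for (int i = 0; i < this.ScaledOffsets.Length; i++)
    {
        linearIndex = linearIndex * scaledDatasetDims[i] + this.ScaledOffsets[i];
    }

    return linearIndex;
}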
public static IEnumerable<Step> Walk(int rank, ulong[] dims, ulong[] chunkDims, HyperslabSelection selection)
{
    /* check if there is anything to do */
    if (selection.GetTotalCount() == 0)
        yield break;

    /* validate rank */
    if (dims.Length != rank || chunkDims.Length != rank)
        throw new RankException("The length of each array parameter must match the rank parameter.");

    /* prepare some useful arrays */
    var lastDim = rank - 1;
    var offsets = new ulong[rank];
    var stops = new ulong[rank];
    var strides = new ulong[rank];
    var blocks = new ulong[rank];
    var gaps = new ulong[rank];
    var scaledDatasetDims = new ulong[rank];
    var chunkLength = chunkDims.Aggregate(1UL, (x, y) => x * y);

    for (int dimension = 0; dimension < rank; dimension++)
    {
        offsets[dimension] = selection.Starts[dimension];
        stops[dimension] = selection.GetStop(dimension);
        strides[dimension] = selection.Strides[dimension];
        blocks[dimension] = selection.Blocks[dimension];
        gaps[dimension] = strides[dimension] - blocks[dimension];
        scaledDatasetDims[dimension] = H5Utils.CeilDiv(dims[dimension], chunkDims[dimension]);
    }

    /* prepare last dimension variables */
    var lastDimStop = stops[lastDim];
    var lastDimBlock = blocks[lastDim];
    var lastDimGap = gaps[lastDim];
    var lastChunkDim = chunkDims[lastDim];
    var supportsBulkCopy = lastDimGap == 0;

    /* loop until all data have been processed */
    while (true)
    {
        /* compute number of consecutive points in current slice */
        ulong totalLength;

        if (supportsBulkCopy)
            totalLength = lastDimStop - offsets[lastDim];
        else
            totalLength = lastDimBlock;

        /* with the full length of consecutive points known, we continue with the chunk logic:
         * (there was an attempt to reduce the number of chunk calculations but that did not
         * result in significant performance improvements, so it has been reverted) */
        {
            var remaining = totalLength;

            while (remaining > 0)
            {
                var scaledOffsets = new ulong[rank];
                var chunkOffsets = new ulong[rank];

                for (int dimension = 0; dimension < rank; dimension++)
                {
                    scaledOffsets[dimension] = offsets[dimension] / chunkDims[dimension];
                    chunkOffsets[dimension] = offsets[dimension] % chunkDims[dimension];
                }

                var offset = chunkOffsets.ToLinearIndex(chunkDims);
                var currentLength = Math.Min(lastChunkDim - chunkOffsets[lastDim], remaining);

                yield return new Step()
                {
                    Chunk = scaledOffsets,
                    Offset = offset,
                    Length = currentLength
                };

                remaining -= currentLength;
                offsets[lastDim] += currentLength;
            }
        }

        /* add gap */
        offsets[lastDim] += lastDimGap;

        /* iterate backwards through all dimensions */
        for (int dimension = lastDim; dimension >= 0; dimension--)
        {
            if (dimension != lastDim)
            {
                /* go one step forward */
                offsets[dimension] += 1;

                /* if we have reached a gap, skip that gap */
                var consumedStride = (offsets[dimension] - selection.Starts[dimension]) % strides[dimension];

                if (consumedStride == blocks[dimension])
                    offsets[dimension] += gaps[dimension];
            }

            /* if the current slice is fully processed */
            if (offsets[dimension] >= stops[dimension])
            {
                /* if there is more to process, reset the offset and
                 * repeat the loop for the next higher dimension */
                if (dimension > 0)
                    offsets[dimension] = selection.Starts[dimension];

                /* else, we are done! */
                else
                    yield break;
            }
            /* otherwise, break the loop */
            else
            {
                break;
            }
        }
    }
}
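// Hedged usage sketch for Walk: enumerates the copy steps for a 1-d hyperslab
// (start 2, stride 4, count 3, block 2) over a 20-element dataset stored in
// chunks of 5 elements. The rank-based HyperslabSelection constructor signature
// is assumed here and may differ from the actual type.
public static void WalkUsageSketch()
{
    var selection = new HyperslabSelection(
        rank: 1,
        starts: new ulong[] { 2 },
        strides: new ulong[] { 4 },
        counts: new ulong[] { 3 },
        blocks: new ulong[] { 2 });

    foreach (var step in Walk(rank: 1, dims: new ulong[] { 20 }, chunkDims: new ulong[] { 5 }, selection: selection))
    {
        // e.g. "chunk: [0], offset: 2, length: 2" for the first step
        Console.WriteLine($"chunk: [{string.Join(", ", step.Chunk)}], offset: {step.Offset}, length: {step.Length}");
    }
}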
public ObjectCommentMessage(H5BinaryReader reader) : base(reader)
{
    // comment
    this.Comment = H5Utils.ReadNullTerminatedString(reader, pad: false);
}