Code example #1
File: DatasetHelper.cs Project: spdjudd/sharpHDF
        public static Hdf5Dataset CreateDatasetAddToDatasets(
            ReadonlyNamedItemList<Hdf5Dataset> _datasets,
            Hdf5Identifier _fileId,
            Hdf5Path _parentPath,
            string _name,
            Hdf5DataTypes _datatype,
            int _numberOfDimensions,
            List<Hdf5DimensionProperty> _properties,
            Hdf5CompressionProperty _compressionProperty = null)
        {
            Hdf5Dataset dataset = CreateDataset(
                _fileId,
                _parentPath,
                _name,
                _datatype,
                _numberOfDimensions,
                _properties,
                _compressionProperty);

            // Only register the dataset if creation succeeded.
            if (dataset != null)
            {
                _datasets.Add(dataset);
            }

            return dataset;
        }
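
A rough usage sketch (hypothetical caller: the file object, the Hdf5DataTypes.Double member, and the object-initializer syntax for Hdf5DimensionProperty are assumptions, not confirmed sharpHDF API; CurrentSize is the property the helper reads in example #2 below):

        // Hypothetical: create a 1-D dataset of 100 doubles and register it
        // in an existing dataset list. No compression property is passed, so
        // the dataset is created with the default (contiguous) layout.
        var dimensions = new List<Hdf5DimensionProperty>
        {
            new Hdf5DimensionProperty { CurrentSize = 100 }   // assumed settable
        };

        Hdf5Dataset dataset = DatasetHelper.CreateDatasetAddToDatasets(
            file.Datasets,        // assumed ReadonlyNamedItemList<Hdf5Dataset> on an open file
            file.Id,              // assumed Hdf5Identifier of the open file
            file.Path,            // assumed Hdf5Path of the parent group
            "measurements",
            Hdf5DataTypes.Double, // assumed enum member
            1,
            dimensions);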
Code example #2
File: DatasetHelper.cs Project: spdjudd/sharpHDF
        public static Hdf5Dataset CreateDataset(
            Hdf5Identifier _fileId,
            Hdf5Path _parentPath,
            string _name,
            Hdf5DataTypes _datatype,
            int _numberOfDimensions,
            List<Hdf5DimensionProperty> _properties,
            Hdf5CompressionProperty _compressionProperty = null)
        {
            // Resolve the full path of the new dataset under its parent.
            Hdf5Path path = _parentPath.Append(_name);

            // Fixed-size dataspace: a null maxSize makes the maximum extent
            // equal the initial extent.
            UInt64[] dimensionSize = new UInt64[_numberOfDimensions];
            UInt64[] maxSize = null;

            // The rank must match both the dimension list and any chunk layout.
            if (_numberOfDimensions != _properties.Count ||
                (_compressionProperty != null && _numberOfDimensions != _compressionProperty.ChunkDimensions.Length))
            {
                throw new Hdf5ArrayDimensionsMismatchException();
            }

            int i = 0;

            // Copy each dimension's current size, validating any chunk layout as we go.
            foreach (var property in _properties)
            {
                dimensionSize[i] = property.CurrentSize;

                if (_compressionProperty != null && _compressionProperty.ChunkDimensions[i] > property.CurrentSize)
                {
                    throw new Hdf5ArraySizeMismatchException();
                }

                i++;
            }

            // Create a simple dataspace describing the dataset's shape.
            Hdf5Identifier dataspaceId = H5S.create_simple(_numberOfDimensions, dimensionSize, maxSize).ToId();

            //TODO handle string datasets
            // Copy the matching native type and force little-endian byte order on disk.
            Hdf5Identifier typeId = H5T.copy(TypeHelper.GetNativeType(_datatype).Value).ToId();
            var status = H5T.set_order(typeId.Value, H5T.order_t.LE);

            // A dataset-creation property list is only needed when compression
            // is requested; 0 is H5P.DEFAULT, the library default list.
            var plist_id = _compressionProperty != null ? H5P.create(H5P.DATASET_CREATE) : 0;

            if (plist_id != 0)
            {
                // Chunked layout is a prerequisite for deflate (gzip) compression.
                H5P.set_chunk(plist_id, _compressionProperty.ChunkDimensions.Length, _compressionProperty.ChunkDimensions);
                H5P.set_deflate(plist_id, _compressionProperty.CompressionLevel);
            }

            // Create the dataset at the computed path with the (possibly default) creation list.
            Hdf5Identifier datasetId = H5D.create(_fileId.Value, path.FullPath, typeId.Value, dataspaceId.Value, dcpl_id: plist_id).ToId();

            Hdf5Dataset dataset = null;

            if (datasetId.Value > 0)
            {
                // Wrap the native handle in a managed Hdf5Dataset, reading the
                // datatype and dataspace back from the newly created dataset.
                dataset = new Hdf5Dataset(_fileId, datasetId, path.FullPath)
                {
                    DataType  = TypeHelper.GetDataTypeFromDataset(datasetId),
                    Dataspace = DataspaceHelper.GetDataspace(datasetId)
                };

                H5D.close(datasetId.Value);
            }

            // Release the dataspace, datatype and (if created) property-list handles.
            H5S.close(dataspaceId.Value);
            H5T.close(typeId.Value);
            if (plist_id != 0)
            {
                H5P.close(plist_id);
            }

            FileHelper.FlushToFile(_fileId);

            return dataset;
        }
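
For the compressed path, a rough sketch of a call (again hedged: the initializer syntax and Hdf5DataTypes.Int32 are assumptions; ChunkDimensions and CompressionLevel are the members the code above reads):

        // Hypothetical: a 2-D 1000x500 Int32 dataset tiled into 100x100 chunks
        // and deflate-compressed at level 6. Each chunk dimension must not
        // exceed the matching CurrentSize, or the helper throws
        // Hdf5ArraySizeMismatchException.
        var dimensions = new List<Hdf5DimensionProperty>
        {
            new Hdf5DimensionProperty { CurrentSize = 1000 },
            new Hdf5DimensionProperty { CurrentSize = 500 }
        };

        var compression = new Hdf5CompressionProperty
        {
            ChunkDimensions  = new UInt64[] { 100, 100 },
            CompressionLevel = 6
        };

        Hdf5Dataset dataset = DatasetHelper.CreateDataset(
            file.Id, file.Path, "image", Hdf5DataTypes.Int32, 2, dimensions, compression);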