diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 38b828eff..72e40908b 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,6 +2,8 @@ name: CI on: push: + branches: + - master pull_request: branches: - master @@ -33,7 +35,19 @@ jobs: uses: actions-rs/cargo@v1 with: command: ${{matrix.command}} - args: "${{matrix.command == 'fmt' && '--all -- --check' || '-- -D warnings'}}" + args: "${{matrix.command == 'fmt' && '--all -- --check' || '--workspace --exclude hdf5-src -- -D warnings -D clippy::cargo'}}" + + doc: # This task should mirror the procedure on docs.rs + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: {submodules: true} + - name: Install Rust (${{matrix.rust}}) + uses: actions-rs/toolchain@v1 + with: {toolchain: nightly, profile: minimal, override: true} + - name: Document workspace + run: env RUSTDOCFLAGS="--cfg docsrs" cargo doc --features hdf5-sys/static,hdf5-sys/zlib,blosc,lzf brew: name: brew @@ -80,9 +94,11 @@ jobs: - {os: macos, version: 1.10.5, mpi: openmpi, channel: conda-forge, rust: beta} - {os: ubuntu, version: 1.10.6, channel: anaconda, rust: stable} - {os: ubuntu, version: 1.10.6, mpi: mpich, channel: conda-forge, rust: nightly} + # - {os: ubuntu, version: 1.10.8, channel: conda-forge, rust: stable} - {os: ubuntu, version: 1.12.0, mpi: openmpi, channel: conda-forge, rust: stable} - {os: macos, version: 1.12.0, channel: conda-forge, rust: stable} - {os: windows, version: 1.12.0, channel: conda-forge, rust: stable} + - {os: ubuntu, version: 1.12.1, channel: conda-forge, rust: stable} defaults: run: shell: bash -l {0} @@ -94,7 +110,7 @@ jobs: uses: actions-rs/toolchain@v1 with: {toolchain: '${{matrix.rust}}', profile: minimal, override: true} - name: Install conda - uses: goanpeca/setup-miniconda@v1 + uses: conda-incubator/setup-miniconda@v2 with: {auto-update-conda: false, activate-environment: testenv} - name: Install HDF5 
(${{matrix.version}}${{matrix.mpi && '-' || ''}}${{matrix.mpi}}) run: | @@ -129,6 +145,39 @@ jobs: with: {toolchain: '${{matrix.rust}}', profile: minimal, override: true} - name: Build and test all crates run: cargo test --workspace -v --features hdf5-sys/static,hdf5-sys/zlib --exclude hdf5-derive + - name: Build and test with filters + run: cargo test --workspace -v --features hdf5-sys/static,hdf5-sys/zlib,lzf,blosc --exclude hdf5-derive + if: matrix.rust != 'stable-gnu' + - name: Run examples + run: | + cargo r --example simple --features hdf5-sys/static,hdf5-sys/zlib,lzf,blosc + cargo r --example chunking --features hdf5-sys/static,hdf5-sys/zlib,lzf,blosc + if: matrix.rust != 'stable-gnu' + + conda_dl: + name: static + runs-on: ${{matrix.os}}-latest + strategy: + fail-fast: false + matrix: + include: + - {os: ubuntu, rust: stable} + - {os: windows, rust: stable-msvc} + - {os: windows, rust: stable-gnu} + - {os: macos, rust: stable} + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: {submodules: true} + - name: Change to older toolchain + if: matrix.os == 'macos' + run: sudo xcode-select -s "/Applications/Xcode_11.7.app" + - name: Install Rust (${{matrix.rust}}) + uses: actions-rs/toolchain@v1 + with: {toolchain: '${{matrix.rust}}', profile: minimal, override: true} + - name: Build and test all crates + run: cargo test --workspace -v --features conda --exclude hdf5-derive + apt: name: apt @@ -137,7 +186,6 @@ jobs: fail-fast: false matrix: include: - - {ubuntu: 16.04, mpi: serial, rust: nightly} - {ubuntu: 18.04, mpi: mpich, rust: beta} - {ubuntu: 20.04, mpi: openmpi, rust: stable} steps: @@ -157,6 +205,12 @@ jobs: run: | [ "${{matrix.mpi}}" != "serial" ] && FEATURES=mpio cargo test -vv --features="$FEATURES" + - name: Test crate for locking on synchronisation + run: | + [ "${{matrix.mpi}}" != "serial" ] && FEATURES=mpio + cargo test -vv --features="$FEATURES" -- lock_part + cargo test -vv --features="$FEATURES" -- lock_part + cargo test 
-vv --features="$FEATURES" -- lock_part msi: name: msi @@ -165,7 +219,7 @@ jobs: fail-fast: false matrix: rust: [stable] - version: [1.8, '1.10'] + version: ["1.8", "1.10", "1.12", "1.13"] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -176,18 +230,26 @@ jobs: - name: Configure environment shell: bash run: | - if [ "${{matrix.version}}" == "1.8" ]; then + if [[ "${{matrix.version}}" == "1.8" ]]; then VERSION=1.8.21 DL_PATH=hdf5-1.8.21-Std-win7_64-vs14.zip - echo ::set-env name=MSI_PATH::hdf\\HDF5-1.8.21-win64.msi - else + echo "MSI_PATH=hdf\\HDF5-1.8.21-win64.msi" >> $GITHUB_ENV + elif [[ "${{matrix.version}}" == "1.10" ]]; then VERSION=1.10.0 DL_PATH=windows/extra/hdf5-1.10.0-win64-VS2015-shared.zip - echo ::set-env name=MSI_PATH::hdf5\\HDF5-1.10.0-win64.msi + echo "MSI_PATH=hdf5\\HDF5-1.10.0-win64.msi" >> $GITHUB_ENV + elif [[ "${{matrix.version}}" == "1.12" ]]; then + VERSION=1.12.0 + DL_PATH=hdf5-1.12.0-Std-win10_64-vs16.zip + echo "MSI_PATH=hdf\\HDF5-1.12.0-win64.msi" >> $GITHUB_ENV + else + VERSION=1.13.0 + DL_PATH=windows/hdf5-1.13.0-Std-win10_64-vs16.zip + echo "MSI_PATH=hdf\\HDF5-1.13.0-win64.msi" >> $GITHUB_ENV fi BASE_URL=https://support.hdfgroup.org/ftp/HDF5/prev-releases - echo ::set-env name=DL_URL::$BASE_URL/hdf5-${{matrix.version}}/hdf5-$VERSION/bin/$DL_PATH - echo ::add-path::C:\\Program Files\\HDF_Group\\HDF5\\$VERSION\\bin + echo "DL_URL=$BASE_URL/hdf5-${{matrix.version}}/hdf5-$VERSION/bin/$DL_PATH" >> $GITHUB_ENV + echo "C:\\Program Files\\HDF_Group\\HDF5\\$VERSION\\bin" >> $GITHUB_PATH - name: Install HDF5 (${{matrix.version}}) shell: pwsh run: | @@ -196,3 +258,46 @@ jobs: msiexec /i ${{env.MSI_PATH}} /quiet /qn /norestart - name: Build and test all crates run: cargo test -vv + + msrv: + name: Minimal Supported Rust Version + runs-on: ubuntu-18.04 + strategy: + fail-fast: false + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: {submodules: true} + - name: Install Rust + uses: actions-rs/toolchain@v1 + 
with: {toolchain: 1.51, profile: minimal, override: true} + - name: Build and test all crates + run: + cargo test --workspace -vv --features=hdf5-sys/static --exclude=hdf5-derive + + wine: + name: wine + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: {submodules: true} + - name: Install Rust + uses: actions-rs/toolchain@v1 + with: {toolchain: stable, target: x86_64-pc-windows-gnu, profile: minimal, override: true} + - name: Install dependencies + run: sudo apt-get update && sudo apt install wine64 mingw-w64 + - name: Build and test + run: env CARGO_TARGET_X86_64_PC_WINDOWS_GNU_RUNNER=wine64 cargo test --features hdf5-sys/static --target x86_64-pc-windows-gnu -- --skip test_compile_fail + addr_san: + name: Address sanitizer + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: {submodules: true} + - name: Install Rust + uses: actions-rs/toolchain@v1 + with: {toolchain: nightly, profile: minimal, override: true} + - name: Run test with sanitizer + run: env RUSTFLAGS="-Z sanitizer=address" cargo test --features hdf5-sys/static --target x86_64-unknown-linux-gnu --workspace --exclude hdf5-derive diff --git a/CHANGELOG.md b/CHANGELOG.md index d6cdc879e..6503a2cea 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,15 +1,217 @@ # Changelog +## Unreleased + +### Added + +- Support for HDF5 version 1.13.0. +- Support field renaming via `#[hdf5(rename = "new_name")]` helper attribute. +- Add a `ByteReader` which implements `std::io::{Read, Seek}` for 1D `u8` + datasets. Usage via `Dataset::as_byte_reader()`. + +### Changed + +- The `H5Type` derive macro now uses `proc-macro-error` to emit error messages. + +### Fixed + +- Fixed a bug where `H5Pget_fapl_direct` was only included when HDF5 was compiled + with feature `have-parallel` instead of `have-direct`. + +## 0.8.1 + +Release date: Nov 21, 2021. 
+ +### Added + +- `Error` now implements `From`, which allows passing convertible + extents (like tuples of integers) where `impl TryInto` is required. +- Support for HDF5 versions 1.12.1 and 1.10.8. +- `#[derive(H5Type)]` now supports structs / tuple structs with `repr(packed)`. +- `#[derive(H5Type)]` now supports structs / tuple structs with + `repr(transparent)` (the generated HDF5 type is equivalent to the type of + the field and is not compound). + +### Changed + +- Renamed `filters::gzip_available()` to `deflate_available()` (the old name is + present but marked as deprecated). +- Code dependent on HDF5 version in `hdf5` and `hdf5-sys` crates now uses features + instead of cfg options: `cfg(feature = "1.10.1")` instead of `cfg(hdf5_1_10_1)`. + The main initial reason for that is for HDF5 versions to show up in the official + documentation on docs.rs. +- Similar to the above, there's `have-direct`, `have-parallel` and `have-threadsafe` + features that reflect the build configuration of the underlying HDF5 library. + +### Fixed + +- Fixed a bug where all blosc filter settings were discarded / zeroed out. +- Fixed errors when recovering filter pipelines from stored datasets. +- Fixed a bug where filter availability was computed incorrectly. + +## 0.8.0 + +Release date: Oct 23, 2021. + +### Added + +- Complete rewrite of `DatasetBuilder`; dataset creation API is now different + and not backwards-compatible (however, it integrates all the new features + and is more flexible and powerful). It is now possible to create and write + datasets in one step. Refer to the API docs for full reference. +- Added new `Extents` type matching HDF5 extents types: null (no elements), + scalar, simple (fixed dimensionality); it is used to query and specify shapes + of datasets. 
Extents objects are convertible from numbers and also tuples, + slices and vectors of indices -- all of which can be used whenever passing + extents is required (e.g., when creating a new dataset or an attribute). +- Added new `Selection` type with the surrounding API closely matching native + HDF5 selection API. This includes 'all' selection, point-wise selection and + hyperslab selection (only 'regular' hyperslabs are supported -- that is, + hyperslabs that can be represented as a single multi-dimensional box some of + whose dimensions may be infinite). Selection objects are convertible from + integers, ranges, tuples and arrays of integers and ranges; one can also use + `s!` macro from `ndarray` crate if needed. Selections can be provided when + reading and writing slices. +- Support for LZF / Blosc filters has been added. These filters are enabled by + "lzf" / "blosc" cargo features and depend on `lzf-sys` / `blosc-src` crates + respectively. Blosc filter is a meta-filter providing multi-threaded access + to the best-in-class compression codecs like Zstd and LZ4 and is recommended + to use as a default when compression performance is critical. +- Added new `Filter` type to unify the filters API; if LZF / Blosc filters are + enabled, this enum also contains the corresponding variants. It is now also + possible to provide user-defined filters with custom filter IDs and configs. +- Added wrappers for dataset creation property list (DCPL) API. This provides + access to the properties that can be specified at dataset creation time + (e.g., layout, chunking, fill values, external file linking, virtual maps, + object time tracking, attribute creation order, and a few other settings). +- Virtual dataset maps (VDS API in HDF5 1.10+) are now supported. +- Added wrappers for link creation property list (LCPL) API. 
+- File creation property list (FCPL) API has been extended to include a few + previously missing properties (object time tracking, attribute creation order + and few other settings). +- Added "h5-alloc" feature to `hdf5-types` crate. It enables using the HDF5 + library allocator for varlen types and dynamic values. This may be necessary + on platforms where different allocators may be used in different libraries + (e.g. dynamic libraries on Windows) or if `libhdf5` is compiled with the + memchecker option enabled. This option is force-enabled by default if using + a dll version of the HDF5 library on Windows. +- Added new `DynValue` type which represents a dynamic self-describing HDF5 + object that also knows how to deallocate itself. It supports all the HDF5 + types including compound types, strings and arrays. +- Added support for attributes and a new `Attribute` object type. The attribute + API uses the new dataset API with some restrictions imposed by HDF5 library + (e.g. one can not perform partial IO, attributes must be read all at once). +- `hdf5-sys` now exports new functions added in HDF5 1.10.6 and 1.10.7. +- Added to `Dataset`: + - `layout`: get the dataset layout. + - `dapl`, `access_plist`: get the dataset access plist. + - `dcpl`, `create_plist`: get the dataset create plist. +- Added to `Location`: + - `loc_info`, `loc_info_by_name`: retrieve information on a location. + - `loc_type`, `loc_type_by_name`: retrieve location type. + - `open_by_token`: open a location by its token (physical address). +- Added to `Group`: + - `iter_visit`: closure API for iterating over objects in a group. + - `iter_visit_default`: like `iter_visit` but with default iteration order. + - `get_all_of_type`: find all objects in a group of a given type. + - Shortcut methods for finding all objects in a group of a given type: + `datasets`, `groups`, `named_datatypes`, `link_external`. +- Added to `Handle`: + - `id_type`: get native HDF5 object type. 
+ - `try_borrow`: instantiate a handle but don't take ownership of the object. + - `Handle` now implements `Debug`. +- Added to `ObjectClass`: + - `cast()`: safe counterpart to `cast_unchecked()`. +- Added to `Object`: + - Safe downcast methods: `as_file`, `as_group`, `as_datatype`, + `as_dataspace`, `as_dataset`, `as_attr`, `as_location`, + `as_container`, `as_plist`. +- Added to `FileAccessBuilder`: + - `libver_earliest`, `libver_v18`, `libver_v110`, `libver_latest`: + shortcuts for setting the minimum library version. +- Added to `FileAccess`: + - `libver`: shortcut for getting the minimum library version. + + ### Changed + +- Required Rust compiler version is now `1.51`. +- Removed `num-integer` and `num-traits` dependencies. +- `Dataspace` type has been reworked and can be now constructed from an + `Extents` object and sliced with a `Selection` object. +- `Dataset::fill_value` now returns an object of the newly added `DynValue` + type; this object is self-describing and knows how to free itself. +- Automatic chunking now uses a fill-from-back approach instead of the + previously used method which was borrowed from `h5py`. +- Removed the old `Filters` type (replaced by `Filter` that represents a + single filter). +- `write_slice`, `read_slice`, `read_slice_1d`, `read_slice_2d` in `Container` + now take any object convertible to `Selection` (instead of `SliceInfo`). +- `Dataset::chunks` has been renamed to `Dataset::chunk`. +- Const generics support (MSRV 1.51): `hdf5-types` now uses const generics for + array types, allowing fixed-size arrays of arbitrary sizes. `Array` trait has + been removed. String types are now generic over size: `FixedAscii` and + `FixedUnicode`. +- `ndarray` dependency has been updated to `0.15`. +- The version of HDF5 in `hdf5-src` has been updated from 1.10.6 to 1.10.7. +- `zlib` dependency is no longer included with `default-features`. +- The crate no longer calls `H5close` automatically on program exit. 
+- Errors are now silenced, and will not be written to stderr by default. +- `silence_errors` now works globally instead of using guards. +- Errors are no longer automatically expanded into error stacks when + encountered. This can be still done manually (e.g. via printing the error). +- Handles to HDF5 identifiers are no longer tracked via a global registry; + identifier safety is now enforced via stricter semantics of ownership. +- Handles to `File` objects will no longer close all objects contained in the + file when dropped -- weak file close degree is now used instead. For the old + behaviour see `FileCloseDegree::Strong`. +- HDF5 global variables no longer create a `lazy_static` per variable. +- Unsafe `cast()` in `ObjectClass` has been renamed to `cast_unchecked()`. +- Bump `winreg` (Windows only) to 0.10, `pretty_assertions` (dev) to 1.0. +- Updated the example in the readme to showcase the new features. + +### Fixed + +- A potential memory leak of identifier handles has been identified and fixed. +- A potential race condition occurring in multi-thread library initialisation + has been identified and fixed. + +### Removed + +- Free-standing functions `get_id_type`, `is_valid_id`, `is_valid_user_id` + have been removed in favor of `Handle` methods. + +## 0.7.1 + +Release date: Jan 27, 2021. + +### Added + +- Slices can now be used where trait item `Dimension` is required. +- Arrays of arbitrary sizes are now supported in `hdf5-types`. This requires + the crate feature `const_generics` and minimum Rust version of 1.51. + +### Changed + +- Dependencies are bumped to the newest major versions; `ndarray` users may + now use both version `0.13` and version `0.14`. + +### Fixed + +- Cross-compilation of `hdf5-src` from Unix to Windows will now use the correct + name of the static library when linking. + ## 0.7.0 +Release date: Aug 9, 2020. 
+ ### Added - HDF5 C library can now be built from source and linked in statically, enabled via `hdf5-sys/static` feature (as of this release, the version of the bundled - sources of the library is 1.10.6). CMake is required for building. For further + sources of HDF5 is 1.10.6). CMake is required for building. For further details, see the docs for `hdf5-sys`. -- Thanks to static build option, the documentation will now be built on - [docs.rs](https://docs.rs/crate/hdf5); if it builds successfully, this +- Thanks to static build option, the documentation will now be built on + [docs.rs](https://docs.rs/crate/hdf5); if it builds successfully, this will be the official documentation source from now on. - Add support for HDF5 1.12 on all platforms and include it in CI. @@ -28,21 +230,23 @@ - We now force the variable-length allocator that HDF5 uses when reading data to use `libc::malloc` and `libc::free`, so that they can be deallocated properly by `VarLenString` and `VarLenArray` in `hdf5-types`. Previously, - this could cause a rare but serious failure for Windows builds when the - default allocator used for vlen types by HDF5 was not matching the + this could cause a rare but serious failure for Windows builds when the + default allocator used for vlen types by HDF5 was not matching the libc deallocator. -- Use `std::panic::catch_unwind` in all cases where we uses extern C callbacks, +- Use `std::panic::catch_unwind` in all cases where we use extern C callbacks, so that they are panic-safe. - `Reader::read_raw` and `Reader::read_slice` should now be `Drop`-safe in the event where the read operation fails and the destination type is not trivial. ## 0.6.1 +Release date: Apr 12, 2020. + ### Added - Implement `Default` for `H5D_layout_t`, `H5D_alloc_time_t`, `H5T_cset_t`, - `H5D_fill_time_t`, `H5D_fill_value_t` (based on what their values are set - to in default property lists). 
+ `H5D_fill_time_t`, `H5D_fill_value_t` (based on what their values are set to + in default property lists). - Derive `Debug` for `ErrorFrame` and `ErrorStack`. - Implement `Display` for `TypeDescriptor`. - Implement `Dimension` for `[Ix]`, `&[Ix]`, `[Ix; N]` and `&[Ix; N]`. @@ -60,7 +264,7 @@ - Remove implementations of deprecated `Error::description()`. - Switch to `trybuild` instead of `compiletest_rs` for derive-macro testing; enable full tests (including hdf5-derive) on both AppVeyor and Travis. -- Update the minimum Rust version to 1.40 (because of `ndarray` and `libloading`). +- Update the minimum Rust version to 1.40 (due to `ndarray` and `libloading`). ### Fixed @@ -71,13 +275,15 @@ ## 0.6.0 +Release date: Feb 17, 2020. + ### Added - Added support for HDF5 1.10.5 with bindings for new functions. - `File::access_plist()` or `File::fapl()` to get file access plist. - `File::create_plist()` or `File::fcpl()` to get file creation plist. -- Added high-level wrappers for dataset access H5P API (`plist::DatasetAccess`). -- Added `hdf5::is_library_threadsafe()` function. +- Added wrappers for dataset access H5P API in `plist::DatasetAccess`. +- Added `is_library_threadsafe()` function. - Added `Group::member_names()`. - Added `Datatype::byte_order()`. - Added `Dataset::num_chunks()` (1.10.5+). @@ -92,27 +298,26 @@ - `File::open(path, "x" | "w-")` is now `File::create_excl(path)` - `File::open(path, "a")` is now `File::append(path)` - Also added `File::open_as(path, mode)` which accepts the mode enum. -- Rewritten `FileBuilder`: it no longer accepts userblock, driver etc; - all of these parameters can be set in the corresponding FAPL / FCPL: - - `FileBuilder::set_access_plist()` or `FileBuilder::set_fapl()` to - set the active file access plist to a given one. - - `FileBuilder::access_plist()` or `FileBuilder::fapl()` to get a - mutable reference to the FAPL builder - any parameter of it can - then be tweaked as desired. 
- - `FileBuilder::with_access_plist()` or `FileBuilder::with_fapl()` - to get access to the FAPL builder in an inline way via a closure. +- Rewritten `FileBuilder`: it no longer accepts userblock, driver etc.; all of + these parameters can be set in the corresponding FAPL / FCPL: + - `FileBuilder::set_access_plist()` or `FileBuilder::set_fapl()` to set the + active file access plist to a given one. + - `FileBuilder::access_plist()` or `FileBuilder::fapl()` to get a mutable + reference to the FAPL builder -- any parameter can then be tweaked. + - `FileBuilder::with_access_plist()` or `FileBuilder::with_fapl()` to get + access to the FAPL builder in an inline way via a closure. - Same as the three above for `create_plist` / `fcpl`. -- As a result, all of the newly added FAPL / FCPL functionality is - fully accessible in the new `FileBuilder`. Also, driver strings - are gone, everything is strongly typed now. -- It's no longer prohibited to set FCPL options when opening a file - and not creating it -- it will simply be silently ignored (this - simplifies the behavior and allows using a single file builder). -- Added an explicit `hdf5_types::string::StringError` error type, - as a result `error-chain` dependency has been dropped. -- `hdf5::Error` is now convertible from `ndarray::ShapeError`; - `hdf5::ResultExt` trait has been removed. -- Renamed `hdf5::hdf5_version()` to `hdf5::library_version()`. +- As a result, all the newly added FAPL / FCPL functionality is fully + accessible in the new `FileBuilder`. Also, driver strings are gone, + everything is strongly typed now. +- It's no longer prohibited to set FCPL options when opening a file and not + creating it -- it will simply be silently ignored (this simplifies the + behavior and allows using a single file builder). +- Added an explicit `hdf5_types::string::StringError` error type, and + `error-chain` dependency has now been dropped. 
+- `Error` is now convertible from `ndarray::ShapeError`; `ResultExt` trait has + been removed. +- Renamed `hdf5_version()` to `library_version()`. ### Fixed @@ -121,6 +326,8 @@ ## 0.5.2 +Release date: Jul 14, 2019. + ### Changed - Allow chunk dimensions to exceed dataset dimensions for resizable datasets. @@ -128,6 +335,8 @@ ## 0.5.1 +Release date: Mar 8, 2019. + ### Added - Added `Group::link_exists()`. @@ -136,71 +345,80 @@ ### Changed -- Using `#[derive(H5Type)]` no longer requires adding `hdf5-types` as a dependency. +- `#[derive(H5Type)]` no longer requires adding `hdf5-types` as a dependency. ## 0.5.0 +Release date: Mar 8, 2019. + ### Added - Added support for HDF5 1.10. -- Added Rust equivalents of HDF5 primitives: arrays, Unicode strings and ASCII strings – all of - them available in both fixed-size or variable-length flavours (`hdf5-types` crate). -- Added `H5Type` trait that unifies the types that can be handled by the HDF5 library. This trait - is implemented by default for all scalar types, tuples, fixed-size arrays and all types in - `hdf5-types` and can be used to create `Datatype` objects. -- Implemented `#[derive(H5Type)]` proc macro that allows for seamless mapping of user-defined - structs and enums to their HDF5 counterparts. +- Added Rust equivalents of HDF5 primitives: arrays, Unicode strings and ASCII + strings – all of them available in both fixed-size/variable-length flavours + (see `hdf5-types` crate for details). +- Added `H5Type` trait that unifies the types that can be handled by the HDF5 + library. This trait is implemented by default for all scalar types, tuples, + fixed-size arrays and all types in `hdf5-types` and can be used to create + `Datatype` objects for known types. +- Implemented `#[derive(H5Type)]` proc macro that allows for seamless mapping + of user-defined structs and enums to their HDF5 counterparts. 
- Added high-level wrappers for file-creation H5P API (`plist::FileCreate`) and file-access H5P API (`plist::FileAccess`), covering almost the entirety of FCPL and FAPL property list functionality. - Various improvements and additions to `PropertyList` type. -- Added support for various file drivers (sec2/stdio/core/family/multi/split/log). -- Added support for MPIO driver (HDF5 has to be built with H5_HAVE_PARALLEL and - the crate has to be built with "mpio" feature enabled). -- Added support for direct VFD driver (HDF5 has to be built with H5_HAVE_DIRECT). -- Added some missing bindings to `hdf5-sys`: driver-related FAPL bindings - in h5p/h5fd (including MPIO and direct VFD drivers), MPIO bindings in h5p/h5f/h5fd. -- Added core reading/writing API in `Container`, with support for reading/writing scalars, - 1-d, 2-d, and dynamic-dimensional arrays, and raw slices. As a side effect, the main crate - now depends on `ndarray`. `Dataset` now dereferences to `Container`. +- Added support for file drivers: sec2/stdio/core/family/multi/split/log. +- Added support for MPIO driver (requires `H5_HAVE_PARALLEL` HDF5 flag and the + crate has to be built with "mpio" feature enabled). +- Added support for direct VFD driver (requires `H5_HAVE_DIRECT` HDF5 flag). +- Added some missing bindings to `hdf5-sys`: driver-related FAPL bindings in + `h5p` and `h5fd` (including MPIO and direct VFD drivers), MPIO bindings in + `h5p`, `h5f` and `h5fd`. +- Added core reading/writing API in `Container`, with support for reading and + writing scalars, 1-D/2-D/dynamic-dimensional arrays and raw slices. As a + side effect, the main crate now depends on `ndarray`. `Dataset` now + dereferences to `Container`. - Added basic support for reading and writing dataset slices. -- When creating datasets, in-memory type layouts are normalized (converted to C repr). -- Added `packed` option to `DatasetBuilder` (for creating packed HDF5 datasets). 
-- All high-level objects now implement `Clone` (shallow copy, increases refcount). +- When creating datasets, in-memory type layouts are normalized (converted to + C representation). +- Added `packed` option to `DatasetBuilder` for creating packed HDF5 layouts. +- All object types now implement `Clone` (shallow copy, increases refcount). ### Changed -- Renamed `hdf5-rs` crate (importable as `h5`) to `hdf5` (importable simply as `hdf5`). +- Renamed `hdf5-rs` crate to `hdf5`. - Renamed `libhdf5-sys` crate to `hdf5-sys` (importable as `hdf5_sys`). - Renamed GitHub repository to `aldanor/hdf5-rust`. -- Updated the bindings and tests to the latest HDF5 versions (1.10.4 and 1.8.21). +- Updated the bindings and tests to the latest HDF5 versions (1.10.4 / 1.8.21). - The build system has been reworked from the ground up: - - `hdf5-lib` crate has been removed; all of the build-time logic now resides - in the build script of `hdf5-sys`. - - The environment variables the build script reacts to are now `HDF5_DIR` and `HDF5_VERSION`. + - `hdf5-lib` crate has been removed; all the build-time logic now resides in + the build script of `hdf5-sys`. + - The build script now looks for `HDF5_DIR` and `HDF5_VERSION` env vars. - `pkg-config` is now only launched on Linux. - - On macOS, the build scripts detects Homebrew installations, for both 1.8 and 1.10 versions. - - On Windows, we now scan the registry to detect official system-wide installations. - - Dynamic linking now works with conda envs; `HDF5_DIR` can be now pointed to a conda env. - - A few definitions from `H5pubconf.h` are now exposed as cfg definitions, like - `h5_have_parallel`, `h5_have_threadsafe` and `h5_have_direct` (this requires us to - locate the include folder and parse the header at build time). -- Various clean ups in `hdf5-sys`: implemented `Default` and `Clone` where + - macOS: the build script detects Homebrew installations (both 1.8 and 1.10). 
+ - Windows: we now scan the registry to detect official system-wide installs. + - `HDF5_DIR` can be now pointed to a conda env for dynamic linking. + - A few definitions from `H5pubconf.h` are now exposed as cfg definitions, + like `h5_have_parallel`, `h5_have_threadsafe` and `h5_have_direct` (this + requires locating the include folder and parsing the header at build time). +- Various cleanups in `hdf5-sys`: implemented `Default` and `Clone` where applicable, moved a few types and methods to matching parent modules. -- Major refactor: trait-based type hierarchy has been replaced with a `Deref`-based - hierarchy instead (53eff4f). `ID` and `FromID` traits have been removed. Traits like `Location`, - `Object` and a few other have been replaced with real types (wrappers around HDF5 handles, same - as the concrete types like `File`). Subtypes then dereference into parent types, so the - user can user methods of the parent type without having to import any traits into scope - (for instance, `File` dereferences into `Group`, which dereferences into `Location`, - which dereferences into `Object`). -- Dataspaces and property lists can now be copied via `.copy()` method (instead of `.clone()`). +- Major refactor: trait-based type hierarchy has been replaced with a + `Deref`-based hierarchy instead (53eff4f). `ID` and `FromID` traits have been + removed. Traits like `Location`, `Object` and a few others have been replaced + with real types (wrappers around HDF5 handles, same as the concrete types + like `File`). Subtypes then dereference into parent types, so the user can + call methods of the parent type without having to import any traits into + scope (for instance, `File` dereferences into `Group`, which dereferences + into `Location`, which dereferences into `Object`). +- Dataspaces and property lists can now be properly copied via `.copy()` method + (instead of `.clone()` which now yields a shallow copy increasing refcount). 
### Fixed -- `hbool_t` is now mapped to unsigned integer of proper size (either 1 byte or 4 bytes), - depending on how the HDF5 library was built and on which platform. -- Added missing bindings for previous versions (mostly in `h5p` and `h5fd` modules). +- `hbool_t` is now mapped to unsigned integer of proper size (either 1 byte or + 4 bytes), depending on how the HDF5 library was built and on which platform. +- Added missing bindings for previous versions (mostly in `h5p` and `h5fd`). - Querying the HDF5 error stack is now thread-safe. - Error silencing (`silence_errors()`) is now thread-safe. - Fixed wrong bindings for `H5AC_cache_config_t`. @@ -208,55 +426,64 @@ ### Removed - Removed `hdf5-lib` crate (merged it into `hdf5-sys`, see above). -- Removed `remutex` crate, using locking primitives from `parking_lot` crate instead. -- `Container` trait has been removed, all of its functionality merged into `Group` type. +- Removed `remutex` crate, using locking primitives from `parking_lot` crate. +- `Container` trait has been removed in favor of `Group` type. ### Notes -- The version number jump is due to renaming crates `hdf5-rs` and `libhdf5-sys` to `hdf5` and - `hdf5-sys`, respectively. Since there were already published crates with those names and - the crates registry is meant to be immutable even if the crates are yanked, we had to - bump the version so that it shadows all of the older versions. +- The version number jump is due to renaming crates `hdf5-rs` and `libhdf5-sys` + to `hdf5` and `hdf5-sys`, respectively. Since there were already published + crates with those names and the crates registry is meant to be immutable even + if the crates are yanked, we had to bump the version so that it shadows all + the older versions. + +## 0.2.0 -## 0.2.0 (Apr 17, 2016) +Release date: Jul 29, 2015. ### Added -- Full support of `msvc` target on Windows. CI tests on AppVeyor now use official reeases of HDF5 - binaries (1.8.16, VS2015, x86_x64). 
The `gnu` target are still unofficially supported but - won't be tested. -- If `HDF5_LIBDIR` is not specified when building on Windows and `PATH` contains what looks like - the `bin` folder of HDF5 installation, the library directory will be inferred automatically. - The official HDF5 installers add the `bin` folder to user path, so the official MSVC releases - should just work out of the box without having to set any environment variables. -- The library is now split into three crates: `hdf5-lib` (requests linkage to HDF5), - `hdf5-sys` (contains bindings, requires `hdf5-lib` at build time in order to conditionally - enable or disable certain HDF5 functionality), and `hdf5` (the user-facing crate, requires - both lower-level crates at build time). +- Full support of `msvc` target on Windows. CI tests on AppVeyor now use + official releases of HDF5 binaries (1.8.16, VS2015, x86_x64). The `gnu` + target is still unofficially supported but won't be tested. +- If `HDF5_LIBDIR` is not specified when building on Windows and `PATH` + contains what looks like the `bin` folder of HDF5 installation, the library + directory will be inferred automatically. The official HDF5 installers add + the `bin` folder to user path, so the official MSVC releases should just work + out of the box without having to set any environment variables. +- The library is now split into three crates: `hdf5-lib` (requests linkage to + HDF5), `hdf5-sys` (contains bindings, requires `hdf5-lib` at build time in + order to conditionally enable or disable certain HDF5 functionality), and + `hdf5` (the user-facing crate, requires both lower-level crates at build time). - Added `hdf5::hdf5_version` function. - The minimum required version of the HDF5 library is now 1.8.4. -- Both `hdf5-sys` and `hdf5` crates can now use version attributes at compile time to - enable/disable/change functionality. 
All functions and definitions that appeared in HDF5 versions - past 1.8.4 are now conditionally enabled in `hdf5-sys`. -- Added bindings for HDF5 functions that were added in releases 1.8.15 and 1.8.16. -- Static global variables in HDF5 (H5E, H5P, H5T) are now linked based on HDF5 version and not - the target platform (`_ID_g` variables were introduced in 1.8.14). When `msvc` target is used, - the dllimport stub prefixes are also accounted for. The constants exposed by `hdf5-sys` are - now of reference type and need to be dereferenced upon use (for `msvc`, they have to be - dereferenced twice). +- Both `hdf5-sys` and `hdf5` crates can now use version attributes at compile + time to enable/disable/change functionality. All functions and definitions + that appeared in HDF5 versions past 1.8.4 are now conditionally enabled in + `hdf5-sys`. +- Added bindings for HDF5 functions that were added in 1.8.15 and 1.8.16. +- Static global variables in HDF5 (H5E, H5P, H5T) are now linked based on HDF5 + version and not the target platform (`_ID_g` variables were introduced in + 1.8.14). When `msvc` target is used, dllimport stub prefixes are also + accounted for. Constants exposed by `hdf5-sys` are now of reference type and + need to be dereferenced (for `msvc`, they have to be dereferenced twice). ### Changed -- API simplification: many methods previously expecting `Into` inputs now just take `&str`. -- `util::to_cstring` now takes `Borrow` instead of `Into` so as to avoid - unnecessary allocations, and the return value is now wrapped in `Result` so that interior - null bytes in input strings trigger an error. +- API simplification: many methods previously expecting `Into` inputs + now just take `&str`. +- `util::to_cstring` now takes `Borrow` instead of `Into` to avoid + unnecessary allocations, and the return value is now wrapped in `Result` so + that interior null bytes in input strings trigger an error. 
### Fixed -- Fixed dangling pointer problems when strings were being passed as pointers to the C API. +- Fixed dangling pointer problems when strings were passed as pointers to + the C API. - Fixed target path not being passed correctly in `Container::link_soft`. -## 0.1.1 (July 2015) +## 0.1.0 + +Release date: Jul 27, 2015. Initial public version. diff --git a/Cargo.toml b/Cargo.toml index 05dd799be..1d3851b49 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hdf5" -version = "0.7.0" # !V +version = "0.8.1" # !V authors = ["Ivan Smirnov "] keywords = ["hdf5"] license = "MIT OR Apache-2.0" @@ -10,10 +10,12 @@ repository = "https://github.com/aldanor/hdf5-rust" homepage = "https://github.com/aldanor/hdf5-rust" build = "build.rs" edition = "2018" +categories = ["science", "filesystem"] [features] default = [] mpio = ["mpi-sys", "hdf5-sys/mpio"] +conda = ["hdf5-sys/conda"] [workspace] members = [".", "hdf5-types", "hdf5-derive", "hdf5-sys", "hdf5-src"] @@ -23,22 +25,26 @@ default-members = [".", "hdf5-types", "hdf5-derive", "hdf5-sys"] bitflags = "1.2" lazy_static = "1.4" libc = "0.2" -parking_lot = "0.10" -ndarray = "0.13" -num-integer = "0.1" -num-traits = "0.2" +parking_lot = "0.11" +ndarray = "0.15" +paste = "1.0" mpi-sys = { version = "0.1", optional = true } -hdf5-sys = { path = "hdf5-sys", version = "0.7.0" } # !V -hdf5-types = { path = "hdf5-types", version = "0.7.0" } # !V -hdf5-derive = { path = "hdf5-derive", version = "0.7.0" } # !V +errno = { version = "0.2", optional = true } +hdf5-sys = { path = "hdf5-sys", version = "0.8.1" } # !V +hdf5-types = { path = "hdf5-types", version = "0.8.1" } # !V +hdf5-derive = { path = "hdf5-derive", version = "0.8.1" } # !V +blosc-sys = { version = "0.1.1", package = "blosc-src", optional = true } +lzf-sys = { version = "0.1", optional = true } +cfg-if = "1.0" [dev-dependencies] -mashup = "0.1" -pretty_assertions = "0.6" -rand = { version = "0.7", features = ["small_rng"] } +paste = "1.0" 
+pretty_assertions = "1.0" +rand = { version = "0.8", features = ["small_rng"] } regex = "1.3" scopeguard = "1.0" -tempdir = "0.3" +tempfile = "3.2" [package.metadata.docs.rs] -features = ["hdf5-sys/static", "hdf5-sys/zlib"] +features = ["hdf5-sys/static", "hdf5-sys/zlib", "blosc", "lzf"] +rustdoc-args = ["--cfg", "docsrs"] diff --git a/README.md b/README.md index 19d2d2277..31c68c330 100644 --- a/README.md +++ b/README.md @@ -6,6 +6,8 @@ HDF5 for Rust. [![Latest Version](https://img.shields.io/crates/v/hdf5.svg)](https://crates.io/crates/hdf5) [![Documentation](https://docs.rs/hdf5/badge.svg)](https://docs.rs/hdf5) [![Changelog](https://img.shields.io/github/v/release/aldanor/hdf5-rust)](https://github.com/aldanor/hdf5-rust/blob/master/CHANGELOG.md) +![hdf5: rustc 1.51+](https://img.shields.io/badge/hdf5-rustc_1.51+-lightblue.svg) +[![Total Lines](https://tokei.rs/b1/github/aldanor/hdf5-rust)](https://github.com/aldanor/hdf5-rust) [![Apache 2.0](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0) [![MIT](https://img.shields.io/badge/License-MIT-blue.svg)](https://opensource.org/licenses/MIT) @@ -24,56 +26,77 @@ Requires HDF5 library of version 1.8.4 or later. 
## Example ```rust -#[derive(hdf5::H5Type, Clone, PartialEq, Debug)] +#[cfg(feature = "blosc")] +use hdf5::filters::blosc_set_nthreads; +use hdf5::{File, H5Type, Result}; +use ndarray::{arr2, s}; + +#[derive(H5Type, Clone, PartialEq, Debug)] // register with HDF5 #[repr(u8)] pub enum Color { - RED = 1, - GREEN = 2, - BLUE = 3, + R = 1, + G = 2, + B = 3, } -#[derive(hdf5::H5Type, Clone, PartialEq, Debug)] +#[derive(H5Type, Clone, PartialEq, Debug)] // register with HDF5 #[repr(C)] pub struct Pixel { xy: (i64, i64), color: Color, } -fn main() -> hdf5::Result<()> { - use self::Color::*; - use ndarray::{arr1, arr2}; - - // so that libhdf5 doesn't print errors to stdout - let _e = hdf5::silence_errors(); - - { - // write - let file = hdf5::File::create("pixels.h5")?; - let colors = file.new_dataset::().create("colors", 2)?; - colors.write(&[RED, BLUE])?; - let group = file.create_group("dir")?; - let pixels = group.new_dataset::().create("pixels", (2, 2))?; - pixels.write(&arr2(&[ - [Pixel { xy: (1, 2), color: RED }, Pixel { xy: (3, 4), color: BLUE }], - [Pixel { xy: (5, 6), color: GREEN }, Pixel { xy: (7, 8), color: RED }], - ]))?; - } - { - // read - let file = hdf5::File::open("pixels.h5")?; - let colors = file.dataset("colors")?; - assert_eq!(colors.read_1d::()?, arr1(&[RED, BLUE])); - let pixels = file.dataset("dir/pixels")?; - assert_eq!( - pixels.read_raw::()?, - vec![ - Pixel { xy: (1, 2), color: RED }, - Pixel { xy: (3, 4), color: BLUE }, - Pixel { xy: (5, 6), color: GREEN }, - Pixel { xy: (7, 8), color: RED }, - ] - ); +impl Pixel { + pub fn new(x: i64, y: i64, color: Color) -> Self { + Self { xy: (x, y), color } } +} + +fn write_hdf5() -> Result<()> { + use Color::*; + let file = File::create("pixels.h5")?; // open for writing + let group = file.create_group("dir")?; // create a group + #[cfg(feature = "blosc")] + blosc_set_nthreads(2); // set number of blosc threads + let builder = group.new_dataset_builder(); + #[cfg(feature = "blosc")] + let builder = 
builder.blosc_zstd(9, true); // zstd + shuffle + let ds = builder + .with_data(&arr2(&[ + // write a 2-D array of data + [Pixel::new(1, 2, R), Pixel::new(2, 3, B)], + [Pixel::new(3, 4, G), Pixel::new(4, 5, R)], + [Pixel::new(5, 6, B), Pixel::new(6, 7, G)], + ])) + // finalize and write the dataset + .create("pixels")?; + // create an attr with fixed shape but don't write the data + let attr = ds.new_attr::().shape([3]).create("colors")?; + // write the attr data + attr.write(&[R, G, B])?; + Ok(()) +} + +fn read_hdf5() -> Result<()> { + use Color::*; + let file = File::open("pixels.h5")?; // open for reading + let ds = file.dataset("dir/pixels")?; // open the dataset + assert_eq!( + // read a slice of the 2-D dataset and verify it + ds.read_slice::(s![1.., ..])?, + arr2(&[ + [Pixel::new(3, 4, G), Pixel::new(4, 5, R)], + [Pixel::new(5, 6, B), Pixel::new(6, 7, G)], + ]) + ); + let attr = ds.attr("colors")?; // open the attribute + assert_eq!(attr.read_1d::()?.as_slice().unwrap(), &[R, G, B]); + Ok(()) +} + +fn main() -> Result<()> { + write_hdf5()?; + read_hdf5()?; Ok(()) } ``` @@ -89,7 +112,7 @@ toolchains; macOS Catalina). ### Rust `hdf5` crate is tested continuously for all three official release channels, and -requires a reasonably recent Rust compiler (e.g. of version 1.40 or newer). +requires a reasonably recent Rust compiler (e.g. of version 1.51 or newer). 
### HDF5 diff --git a/build.rs b/build.rs index 2c0e94a11..caccb2813 100644 --- a/build.rs +++ b/build.rs @@ -1,19 +1,21 @@ use std::env; fn main() { + let print_feature = |key: &str| println!("cargo:rustc-cfg=feature=\"{}\"", key); + let print_cfg = |key: &str| println!("cargo:rustc-cfg={}", key); for (key, _) in env::vars() { - let key = match key.as_str() { - "DEP_HDF5_HAVE_DIRECT" => "h5_have_direct".into(), - "DEP_HDF5_HAVE_STDBOOL" => "h5_have_stdbool".into(), - "DEP_HDF5_HAVE_PARALLEL" => "h5_have_parallel".into(), - "DEP_HDF5_HAVE_THREADSAFE" => "h5_have_threadsafe".into(), - "DEP_HDF5_MSVC_DLL_INDIRECTION" => "h5_dll_indirection".into(), + match key.as_str() { + // public features + "DEP_HDF5_HAVE_DIRECT" => print_feature("have-direct"), + "DEP_HDF5_HAVE_PARALLEL" => print_feature("have-parallel"), + "DEP_HDF5_HAVE_THREADSAFE" => print_feature("have-threadsafe"), + // internal config flags + "DEP_HDF5_MSVC_DLL_INDIRECTION" => print_cfg("msvc_dll_indirection"), + // public version features key if key.starts_with("DEP_HDF5_VERSION_") => { - let version = key.trim_start_matches("DEP_HDF5_VERSION_"); - format!("hdf5_{}", version) + print_feature(&key.trim_start_matches("DEP_HDF5_VERSION_").replace('_', ".")); } _ => continue, - }; - println!("cargo:rustc-cfg={}", key); + } } } diff --git a/examples/chunking.rs b/examples/chunking.rs new file mode 100644 index 000000000..f12d6808f --- /dev/null +++ b/examples/chunking.rs @@ -0,0 +1,38 @@ +//! 
Create, write, and read a chunked dataset + +use hdf5::{File, Result}; +use ndarray::Array2; + +fn main() -> Result<()> { + let file = File::create("chunking.h5")?; + + let (ny, nx) = (100, 100); + let arr = Array2::from_shape_fn((ny, nx), |(j, i)| (1000 * j + i) as f32); + + let ds = file + .new_dataset::() + .chunk((1, ny, nx)) // each chunk contains ny * nx elements + .shape((1.., ny, nx)) // first axis is unlimited with initial size of 1 + .deflate(3) + .create("variable")?; + + // writing one chunk at a time is the most efficient + ds.write_slice(&arr, (0, .., ..))?; + + // dataset can be resized along an unlimited dimension + ds.resize((10, ny, nx))?; + ds.write_slice(&arr, (1, .., ..))?; + + let chunksize = ds.chunk().unwrap(); + assert_eq!(chunksize, &[1, ny, nx]); + + let shape = ds.shape(); + assert_eq!(shape, &[10, ny, nx]); + + // it's best to read from a chunked dataset in a chunk-wise fashion + for k in 0..shape[0] { + let _arr: Array2 = ds.read_slice((k, .., ..))?; + } + + Ok(()) +} diff --git a/examples/simple.rs b/examples/simple.rs index 9fb479789..64563dc39 100644 --- a/examples/simple.rs +++ b/examples/simple.rs @@ -1,52 +1,73 @@ -#[derive(hdf5::H5Type, Clone, PartialEq, Debug)] +#[cfg(feature = "blosc")] +use hdf5::filters::blosc_set_nthreads; +use hdf5::{File, H5Type, Result}; +use ndarray::{arr2, s}; + +#[derive(H5Type, Clone, PartialEq, Debug)] // register with HDF5 #[repr(u8)] pub enum Color { - RED = 1, - GREEN = 2, - BLUE = 3, + R = 1, + G = 2, + B = 3, } -#[derive(hdf5::H5Type, Clone, PartialEq, Debug)] +#[derive(H5Type, Clone, PartialEq, Debug)] // register with HDF5 #[repr(C)] pub struct Pixel { xy: (i64, i64), color: Color, } -fn main() -> hdf5::Result<()> { - use self::Color::*; - use ndarray::{arr1, arr2}; +impl Pixel { + pub fn new(x: i64, y: i64, color: Color) -> Self { + Self { xy: (x, y), color } + } +} + +fn write_hdf5() -> Result<()> { + use Color::*; + let file = File::create("pixels.h5")?; // open for writing + let group = 
file.create_group("dir")?; // create a group + #[cfg(feature = "blosc")] + blosc_set_nthreads(2); // set number of blosc threads + let builder = group.new_dataset_builder(); + #[cfg(feature = "blosc")] + let builder = builder.blosc_zstd(9, true); // zstd + shuffle + let ds = builder + .with_data(&arr2(&[ + // write a 2-D array of data + [Pixel::new(1, 2, R), Pixel::new(2, 3, B)], + [Pixel::new(3, 4, G), Pixel::new(4, 5, R)], + [Pixel::new(5, 6, B), Pixel::new(6, 7, G)], + ])) + // finalize and write the dataset + .create("pixels")?; + // create an attr with fixed shape but don't write the data + let attr = ds.new_attr::().shape([3]).create("colors")?; + // write the attr data + attr.write(&[R, G, B])?; + Ok(()) +} - // so that libhdf5 doesn't print errors to stdout - let _e = hdf5::silence_errors(); +fn read_hdf5() -> Result<()> { + use Color::*; + let file = File::open("pixels.h5")?; // open for reading + let ds = file.dataset("dir/pixels")?; // open the dataset + assert_eq!( + // read a slice of the 2-D dataset and verify it + ds.read_slice::(s![1.., ..])?, + arr2(&[ + [Pixel::new(3, 4, G), Pixel::new(4, 5, R)], + [Pixel::new(5, 6, B), Pixel::new(6, 7, G)], + ]) + ); + let attr = ds.attr("colors")?; // open the attribute + assert_eq!(attr.read_1d::()?.as_slice().unwrap(), &[R, G, B]); + Ok(()) +} - { - // write - let file = hdf5::File::create("pixels.h5")?; - let colors = file.new_dataset::().create("colors", 2)?; - colors.write(&[RED, BLUE])?; - let group = file.create_group("dir")?; - let pixels = group.new_dataset::().create("pixels", (2, 2))?; - pixels.write(&arr2(&[ - [Pixel { xy: (1, 2), color: RED }, Pixel { xy: (3, 4), color: BLUE }], - [Pixel { xy: (5, 6), color: GREEN }, Pixel { xy: (7, 8), color: RED }], - ]))?; - } - { - // read - let file = hdf5::File::open("pixels.h5")?; - let colors = file.dataset("colors")?; - assert_eq!(colors.read_1d::()?, arr1(&[RED, BLUE])); - let pixels = file.dataset("dir/pixels")?; - assert_eq!( - pixels.read_raw::()?, - 
vec![ - Pixel { xy: (1, 2), color: RED }, - Pixel { xy: (3, 4), color: BLUE }, - Pixel { xy: (5, 6), color: GREEN }, - Pixel { xy: (7, 8), color: RED }, - ] - ); - } +fn main() -> Result<()> { + write_hdf5()?; + read_hdf5()?; Ok(()) } diff --git a/hdf5-derive/Cargo.toml b/hdf5-derive/Cargo.toml index c471751c7..d39558f5d 100644 --- a/hdf5-derive/Cargo.toml +++ b/hdf5-derive/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hdf5-derive" -version = "0.7.0" # !V +version = "0.8.1" # !V authors = ["Ivan Smirnov "] keywords = ["hdf5"] license = "MIT OR Apache-2.0" @@ -8,15 +8,18 @@ description = "Derive macro for HDF5 structs and enums." repository = "https://github.com/aldanor/hdf5-rust" homepage = "https://github.com/aldanor/hdf5-rust" edition = "2018" +readme = "README.md" +categories = ["procedural-macro-helpers"] [lib] proc-macro = true [dependencies] +proc-macro-error = { version = "1.0.4", default-features = false } proc-macro2 = "1.0" quote = "^1.0.2" syn = { version = "^1.0.5", features = ["derive", "extra-traits"]} [dev-dependencies] trybuild = "1.0" -hdf5 = { version = ">=0.6", path = ".." } +hdf5 = { version = ">=0.7.1", path = ".." 
} diff --git a/hdf5-derive/README.md b/hdf5-derive/README.md new file mode 100644 index 000000000..f273d2cf2 --- /dev/null +++ b/hdf5-derive/README.md @@ -0,0 +1,3 @@ +# hdf5-derive + +Derive `hdf5` compatible types from rust types diff --git a/hdf5-derive/src/lib.rs b/hdf5-derive/src/lib.rs index 9d478823b..1dc99be34 100644 --- a/hdf5-derive/src/lib.rs +++ b/hdf5-derive/src/lib.rs @@ -1,19 +1,19 @@ #![recursion_limit = "192"] -extern crate proc_macro; - use std::iter; use std::mem; use std::str::FromStr; use proc_macro2::{Ident, Span, TokenStream}; +use proc_macro_error::{abort, proc_macro_error}; use quote::{quote, ToTokens}; use syn::{ - parse_macro_input, AttrStyle, Attribute, Data, DeriveInput, Expr, Fields, Index, Meta, + parse_macro_input, AttrStyle, Attribute, Data, DeriveInput, Expr, Fields, Index, Lit, Meta, NestedMeta, Type, TypeGenerics, TypePath, }; -#[proc_macro_derive(H5Type)] +#[proc_macro_derive(H5Type, attributes(hdf5))] +#[proc_macro_error] pub fn derive(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); let name = input.ident; @@ -61,7 +61,13 @@ where } } -fn impl_enum(names: Vec, values: Vec, repr: &Ident) -> TokenStream { +fn impl_transparent(ty: &Type) -> TokenStream { + quote! 
{ + <#ty as _h5::types::H5Type>::type_descriptor() + } +} + +fn impl_enum(names: &[String], values: &[Expr], repr: &Ident) -> TokenStream { let size = Ident::new( &format!( "U{}", @@ -78,7 +84,7 @@ fn impl_enum(names: Vec, values: Vec, repr: &Ident) -> TokenStream signed: #signed, members: vec![#( _h5::types::EnumMember { - name: stringify!(#names).to_owned(), + name: #names.to_owned(), value: (#values) as #repr as _, } ),*], @@ -90,7 +96,7 @@ fn impl_enum(names: Vec, values: Vec, repr: &Ident) -> TokenStream fn is_phantom_data(ty: &Type) -> bool { match *ty { Type::Path(TypePath { qself: None, ref path }) => { - path.segments.iter().last().map(|x| x.ident == "PhantomData").unwrap_or(false) + path.segments.iter().last().map_or(false, |x| x.ident == "PhantomData") } _ => false, } @@ -126,6 +132,28 @@ fn find_repr(attrs: &[Attribute], expected: &[&str]) -> Option { None } +fn find_hdf5_rename(attrs: &[Attribute]) -> Option { + if let Some(attr) = attrs.iter().find(|a| a.path.is_ident("hdf5")) { + if let Ok(Meta::List(meta_list)) = attr.parse_meta() { + let rename_literal = meta_list.nested.iter().find_map(|n| { + if let NestedMeta::Meta(Meta::NameValue(name_value)) = n { + if name_value.path.is_ident("rename") { + return Some(&name_value.lit); + } + } + + None + }); + + if let Some(Lit::Str(renamed)) = rename_literal { + return Some(renamed.value()); + } + } + } + + None +} + fn pluck<'a, I, F, T, S>(iter: I, func: F) -> Vec where I: Iterator, @@ -141,19 +169,32 @@ fn impl_trait( match *data { Data::Struct(ref data) => match data.fields { Fields::Unit => { - panic!("Cannot derive H5Type for unit structs"); + abort!(ty, "cannot derive `H5Type` for unit structs"); } Fields::Named(ref fields) => { let fields: Vec<_> = fields.named.iter().filter(|f| !is_phantom_data(&f.ty)).collect(); if fields.is_empty() { - panic!("Cannot derive H5Type for empty structs"); + abort!(ty, "cannot derive `H5Type` for empty structs"); + } + + let repr = + find_repr(attrs, &["C", "packed", 
"transparent"]).unwrap_or_else(|| { + abort!(ty, + "`H5Type` requires repr(C), repr(packed) or repr(transparent) for structs") + }); + if repr == "transparent" { + assert_eq!(fields.len(), 1); + impl_transparent(&fields[0].ty) + } else { + let types = pluck(fields.iter(), |f| f.ty.clone()); + let names = pluck(fields.iter(), |f| { + find_hdf5_rename(&f.attrs) + .unwrap_or_else(|| f.ident.as_ref().unwrap().to_string()) + }); + let fields = pluck(fields.iter(), |f| f.ident.clone().unwrap()); + impl_compound(ty, ty_generics, &fields, &names, &types) } - find_repr(attrs, &["C"]).expect("H5Type requires #[repr(C)] for structs"); - let types = pluck(fields.iter(), |f| f.ty.clone()); - let fields = pluck(fields.iter(), |f| f.ident.clone().unwrap()); - let names = fields.iter().map(|f| f.to_string()).collect::>(); - impl_compound(ty, ty_generics, &fields, &names, &types) } Fields::Unnamed(ref fields) => { let (index, fields): (Vec, Vec<_>) = fields @@ -164,31 +205,52 @@ fn impl_trait( .map(|(i, f)| (Index::from(i), f)) .unzip(); if fields.is_empty() { - panic!("Cannot derive H5Type for empty tuple structs"); + abort!(ty, "cannot derive `H5Type` for empty tuple structs") + } + + let repr = find_repr(attrs, &["C", "packed", "transparent"]).unwrap_or_else(|| { + abort!(ty, + "`H5Type` requires repr(C), repr(packed) or repr(transparent) for tuple structs") + }); + if repr == "transparent" { + assert_eq!(fields.len(), 1); + impl_transparent(&fields[0].ty) + } else { + let names = fields + .iter() + .enumerate() + .map(|(n, f)| find_hdf5_rename(&f.attrs).unwrap_or_else(|| n.to_string())) + .collect::>(); + let types = pluck(fields.iter(), |f| f.ty.clone()); + impl_compound(ty, ty_generics, &index, &names, &types) } - find_repr(attrs, &["C"]).expect("H5Type requires #[repr(C)] for structs"); - let names = (0..fields.len()).map(|f| f.to_string()).collect::>(); - let types = pluck(fields.iter(), |f| f.ty.clone()); - impl_compound(ty, ty_generics, &index, &names, &types) } }, 
Data::Enum(ref data) => { let variants = &data.variants; + if variants.iter().any(|v| v.fields != Fields::Unit || v.discriminant.is_none()) { - panic!("H5Type can only be derived for enums with scalar discriminants"); - } else if variants.is_empty() { - panic!("Cannot derive H5Type for empty enums") + abort!(ty, "`H5Type` can only be derived for enums with scalar discriminants") } + + if variants.is_empty() { + abort!(ty, "cannot derive `H5Type` for empty enums") + } + let enum_reprs = &["i8", "i16", "i32", "i64", "u8", "u16", "u32", "u64", "isize", "usize"]; - let repr = find_repr(attrs, enum_reprs) - .expect("H5Type can only be derived for enums with explicit representation"); - let names = pluck(variants.iter(), |v| v.ident.clone()); + let repr = find_repr(attrs, enum_reprs).unwrap_or_else(|| { + abort!(ty, "`H5Type` can only be derived for enums with explicit representation") + }); + let names = variants + .iter() + .map(|v| find_hdf5_rename(&v.attrs).unwrap_or_else(|| v.ident.to_string())) + .collect::>(); let values = pluck(variants.iter(), |v| v.discriminant.clone().unwrap().1); - impl_enum(names, values, &repr) + impl_enum(&names, &values, &repr) } Data::Union(_) => { - panic!("Cannot derive H5Type for tagged unions"); + abort!(ty, "cannot derive `H5Type` for tagged unions"); } } } diff --git a/hdf5-derive/tests/compile-fail/empty-enum.stderr b/hdf5-derive/tests/compile-fail/empty-enum.stderr index 95d6dc95c..f8fd7d87d 100644 --- a/hdf5-derive/tests/compile-fail/empty-enum.stderr +++ b/hdf5-derive/tests/compile-fail/empty-enum.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/empty-enum.rs:4:10 +error: cannot derive `H5Type` for empty enums + --> $DIR/empty-enum.rs:7:6 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for empty enums +7 | enum Foo {} + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/empty-struct.stderr b/hdf5-derive/tests/compile-fail/empty-struct.stderr index 8c89edb38..3ad6402e5 100644 
--- a/hdf5-derive/tests/compile-fail/empty-struct.stderr +++ b/hdf5-derive/tests/compile-fail/empty-struct.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/empty-struct.rs:4:10 +error: cannot derive `H5Type` for empty structs + --> $DIR/empty-struct.rs:7:8 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for empty structs +7 | struct Foo {} + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/empty-tuple-struct.stderr b/hdf5-derive/tests/compile-fail/empty-tuple-struct.stderr index c5c9449be..c4ddab801 100644 --- a/hdf5-derive/tests/compile-fail/empty-tuple-struct.stderr +++ b/hdf5-derive/tests/compile-fail/empty-tuple-struct.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/empty-tuple-struct.rs:4:10 +error: cannot derive `H5Type` for empty tuple structs + --> $DIR/empty-tuple-struct.rs:7:8 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for empty tuple structs +7 | struct Foo(); + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/enum-no-repr.stderr b/hdf5-derive/tests/compile-fail/enum-no-repr.stderr index 209556219..85c0f1c5b 100644 --- a/hdf5-derive/tests/compile-fail/enum-no-repr.stderr +++ b/hdf5-derive/tests/compile-fail/enum-no-repr.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/enum-no-repr.rs:4:10 +error: `H5Type` can only be derived for enums with explicit representation + --> $DIR/enum-no-repr.rs:7:6 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: H5Type can only be derived for enums with explicit representation +7 | enum Foo { + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/enum-non-scalar.stderr b/hdf5-derive/tests/compile-fail/enum-non-scalar.stderr index 0ab1f84c6..36b351888 100644 --- a/hdf5-derive/tests/compile-fail/enum-non-scalar.stderr +++ b/hdf5-derive/tests/compile-fail/enum-non-scalar.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/enum-non-scalar.rs:4:10 +error: `H5Type` can only be 
derived for enums with scalar discriminants + --> $DIR/enum-non-scalar.rs:7:6 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: H5Type can only be derived for enums with scalar discriminants +7 | enum Foo { + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/pd-empty-struct.stderr b/hdf5-derive/tests/compile-fail/pd-empty-struct.stderr index ee6e8e844..828afcc1b 100644 --- a/hdf5-derive/tests/compile-fail/pd-empty-struct.stderr +++ b/hdf5-derive/tests/compile-fail/pd-empty-struct.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/pd-empty-struct.rs:6:10 +error: cannot derive `H5Type` for empty structs + --> $DIR/pd-empty-struct.rs:9:8 | -6 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for empty structs +9 | struct Foo { + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/pd-empty-tuple-struct.stderr b/hdf5-derive/tests/compile-fail/pd-empty-tuple-struct.stderr index b8aa8dee7..1d845d505 100644 --- a/hdf5-derive/tests/compile-fail/pd-empty-tuple-struct.stderr +++ b/hdf5-derive/tests/compile-fail/pd-empty-tuple-struct.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/pd-empty-tuple-struct.rs:6:10 +error: cannot derive `H5Type` for empty tuple structs + --> $DIR/pd-empty-tuple-struct.rs:9:8 | -6 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for empty tuple structs +9 | struct Foo(PhantomData); + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/struct-c-repr.stderr b/hdf5-derive/tests/compile-fail/struct-c-repr.stderr deleted file mode 100644 index 8dde917fe..000000000 --- a/hdf5-derive/tests/compile-fail/struct-c-repr.stderr +++ /dev/null @@ -1,7 +0,0 @@ -error: proc-macro derive panicked - --> $DIR/struct-c-repr.rs:4:10 - | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: H5Type requires #[repr(C)] for structs diff --git a/hdf5-derive/tests/compile-fail/struct-c-repr.rs b/hdf5-derive/tests/compile-fail/struct-no-repr.rs similarity index 63% rename from 
hdf5-derive/tests/compile-fail/struct-c-repr.rs rename to hdf5-derive/tests/compile-fail/struct-no-repr.rs index c442f53e0..b8250588f 100644 --- a/hdf5-derive/tests/compile-fail/struct-c-repr.rs +++ b/hdf5-derive/tests/compile-fail/struct-no-repr.rs @@ -3,7 +3,7 @@ use hdf5_derive::H5Type; #[derive(H5Type)] //~^ ERROR proc-macro derive -//~^^ HELP H5Type requires #[repr(C)] for structs +//~^^ HELP H5Type requires repr(C), repr(packed) or repr(transparent) for structs struct Foo { bar: i64, } diff --git a/hdf5-derive/tests/compile-fail/struct-no-repr.stderr b/hdf5-derive/tests/compile-fail/struct-no-repr.stderr new file mode 100644 index 000000000..6531dbd4f --- /dev/null +++ b/hdf5-derive/tests/compile-fail/struct-no-repr.stderr @@ -0,0 +1,5 @@ +error: `H5Type` requires repr(C), repr(packed) or repr(transparent) for structs + --> $DIR/struct-no-repr.rs:7:8 + | +7 | struct Foo { + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/tuple-struct-c-repr.stderr b/hdf5-derive/tests/compile-fail/tuple-struct-c-repr.stderr deleted file mode 100644 index bdf34305c..000000000 --- a/hdf5-derive/tests/compile-fail/tuple-struct-c-repr.stderr +++ /dev/null @@ -1,7 +0,0 @@ -error: proc-macro derive panicked - --> $DIR/tuple-struct-c-repr.rs:4:10 - | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: H5Type requires #[repr(C)] for structs diff --git a/hdf5-derive/tests/compile-fail/tuple-struct-c-repr.rs b/hdf5-derive/tests/compile-fail/tuple-struct-no-repr.rs similarity index 59% rename from hdf5-derive/tests/compile-fail/tuple-struct-c-repr.rs rename to hdf5-derive/tests/compile-fail/tuple-struct-no-repr.rs index 0f35e007c..8e40fbb1d 100644 --- a/hdf5-derive/tests/compile-fail/tuple-struct-c-repr.rs +++ b/hdf5-derive/tests/compile-fail/tuple-struct-no-repr.rs @@ -3,7 +3,7 @@ use hdf5_derive::H5Type; #[derive(H5Type)] //~^ ERROR proc-macro derive -//~^^ HELP H5Type requires #[repr(C)] for structs +//~^^ HELP H5Type requires repr(C), repr(packed) or repr(transparent) for 
tuple structs struct Foo(i64); fn main() {} diff --git a/hdf5-derive/tests/compile-fail/tuple-struct-no-repr.stderr b/hdf5-derive/tests/compile-fail/tuple-struct-no-repr.stderr new file mode 100644 index 000000000..735d15909 --- /dev/null +++ b/hdf5-derive/tests/compile-fail/tuple-struct-no-repr.stderr @@ -0,0 +1,5 @@ +error: `H5Type` requires repr(C), repr(packed) or repr(transparent) for tuple structs + --> $DIR/tuple-struct-no-repr.rs:7:8 + | +7 | struct Foo(i64); + | ^^^ diff --git a/hdf5-derive/tests/compile-fail/unit-struct.stderr b/hdf5-derive/tests/compile-fail/unit-struct.stderr index 6b80b9847..2c736bc5b 100644 --- a/hdf5-derive/tests/compile-fail/unit-struct.stderr +++ b/hdf5-derive/tests/compile-fail/unit-struct.stderr @@ -1,7 +1,5 @@ -error: proc-macro derive panicked - --> $DIR/unit-struct.rs:4:10 +error: cannot derive `H5Type` for unit structs + --> $DIR/unit-struct.rs:7:8 | -4 | #[derive(H5Type)] - | ^^^^^^ - | - = help: message: Cannot derive H5Type for unit structs +7 | struct Foo; + | ^^^ diff --git a/hdf5-derive/tests/test.rs b/hdf5-derive/tests/test.rs index 6cb6c9363..e20b2fe98 100644 --- a/hdf5-derive/tests/test.rs +++ b/hdf5-derive/tests/test.rs @@ -1,3 +1,6 @@ +// due to compiler wrongfully complaining re: Copy impl missing for packed struct +#![allow(unaligned_references)] + #[macro_use] extern crate hdf5_derive; @@ -18,10 +21,10 @@ struct A { #[repr(C)] struct B { a: [A; 4], - b: FixedAscii<[u8; 8]>, + b: FixedAscii<8>, c: VarLenArray, d: bool, - e: FixedUnicode<[u8; 7]>, + e: FixedUnicode<7>, f: VarLenAscii, g: VarLenUnicode, } @@ -30,6 +33,57 @@ struct B { #[repr(C)] struct T(i64, pub u64); +#[derive(H5Type, Copy, Clone)] +#[repr(packed)] +struct P1 { + x: u8, + y: u64, +} + +#[derive(H5Type, Copy, Clone)] +#[repr(packed)] +struct P2(i8, u32); + +#[derive(H5Type)] +#[repr(transparent)] +struct T1 { + _x: u64, +} + +#[derive(H5Type)] +#[repr(transparent)] +struct T2(i32); + +#[test] +fn test_compound_packed() { + assert_eq!( + 
P1::type_descriptor(), + TD::Compound(CompoundType { + fields: vec![ + CompoundField::typed::("x", 0, 0), + CompoundField::typed::("y", 1, 1), + ], + size: 9, + }) + ); + assert_eq!( + P2::type_descriptor(), + TD::Compound(CompoundType { + fields: vec![ + CompoundField::typed::("0", 0, 0), + CompoundField::typed::("1", 1, 1), + ], + size: 5, + }) + ); +} + +#[test] +fn test_compound_transparent() { + assert_eq!(T1::type_descriptor(), u64::type_descriptor(),); + assert_eq!(T2::type_descriptor(), i32::type_descriptor(),); +} + #[test] fn test_compound_simple() { assert_eq!( diff --git a/hdf5-src/Cargo.toml b/hdf5-src/Cargo.toml index bee2abb48..8f71a86c4 100644 --- a/hdf5-src/Cargo.toml +++ b/hdf5-src/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hdf5-src" -version = "0.7.0" # !V +version = "0.8.1" # !V authors = ["Ivan Smirnov "] keywords = ["hdf5"] license-file = "ext/hdf5/COPYING" @@ -10,6 +10,8 @@ homepage = "https://github.com/aldanor/hdf5-rust" description = "Build script for compiling HDF5 C library from source." edition = "2018" links = "hdf5src" +readme = "README.md" +categories = ["ffi"] exclude = [ "ext/hdf5/bin/**", "ext/hdf5/c++/**", @@ -32,7 +34,7 @@ deprecated = [] threadsafe = [] [dependencies] -libz-sys = { version = "1.0.25", features = ["static"], optional = true } +libz-sys = { version = "1.0.25", features = ["static"], optional = true, default-features=false } [build-dependencies] cmake = "0.1.44" diff --git a/hdf5-src/README.md b/hdf5-src/README.md new file mode 100644 index 000000000..d7b13c50d --- /dev/null +++ b/hdf5-src/README.md @@ -0,0 +1,3 @@ +# hdf5-src + +Dummy crate for packaging and building `hdf5` from source. 
diff --git a/hdf5-src/build.rs b/hdf5-src/build.rs index f284874d7..0ba8be70e 100644 --- a/hdf5-src/build.rs +++ b/hdf5-src/build.rs @@ -18,6 +18,7 @@ fn main() { "HDF5_BUILD_JAVA", "HDF5_BUILD_FORTRAN", "HDF5_BUILD_CPP_LIB", + "HDF5_BUILD_UTILS", "HDF5_ENABLE_PARALLEL", ] { cfg.define(option, "OFF"); @@ -57,7 +58,8 @@ fn main() { } } - let debug_postfix = if cfg!(target_os = "windows") { "_D" } else { "_debug" }; + let targeting_windows = env::var("CARGO_CFG_TARGET_OS").unwrap() == "windows"; + let debug_postfix = if targeting_windows { "_D" } else { "_debug" }; if feature_enabled("HL") { cfg.define("HDF5_BUILD_HL_LIB", "ON"); @@ -71,6 +73,13 @@ fn main() { println!("cargo:hl_library={}", hdf5_hl_lib); } + if cfg!(unix) && targeting_windows { + let wine_exec = + if env::var("CARGO_CFG_TARGET_ARCH").unwrap() == "x86_64" { "wine64" } else { "wine" }; + // when cross-compiling to windows, use Wine to run code generation programs + cfg.define("CMAKE_CROSSCOMPILING_EMULATOR", wine_exec); + } + let dst = cfg.build(); println!("cargo:root={}", dst.display()); diff --git a/hdf5-src/ext/hdf5 b/hdf5-src/ext/hdf5 index f5331844e..db30c2da6 160000 --- a/hdf5-src/ext/hdf5 +++ b/hdf5-src/ext/hdf5 @@ -1 +1 @@ -Subproject commit f5331844e142e4a456ea5452ce3a3980f8bbf57c +Subproject commit db30c2da68ece4a155e9e50c28ec16d6057509b2 diff --git a/hdf5-sys/Cargo.toml b/hdf5-sys/Cargo.toml index 214df45f8..fa47725fc 100644 --- a/hdf5-sys/Cargo.toml +++ b/hdf5-sys/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "hdf5-sys" -version = "0.7.0" # !V +version = "0.8.1" # !V authors = ["Ivan Smirnov "] keywords = ["hdf5"] license = "MIT OR Apache-2.0" @@ -11,12 +11,13 @@ description = "Native bindings to the HDF5 library." 
edition = "2018" links = "hdf5" readme = "README.md" +categories = ["ffi", "filesystem", "science"] [dependencies] libc = "0.2" mpi-sys = { version = "0.1", optional = true } -libz-sys = { version = "1.0.25", optional = true } -hdf5-src = { path = "../hdf5-src", version = "0.7.0", optional = true } # !V +libz-sys = { version = "1.0.25", optional = true, default-features = false } +hdf5-src = { path = "../hdf5-src", version = "0.8.1", optional = true } # !V # Please see README for further explanation of these feature flags [features] @@ -27,10 +28,16 @@ threadsafe = ["hdf5-src/threadsafe"] zlib = ["libz-sys", "hdf5-src/zlib"] static = ["hdf5-src"] deprecated = ["hdf5-src/deprecated"] +conda = ["attohttpc", "sha2", "bzip2", "tar"] [build-dependencies] -libloading = "0.6" +libloading = "0.7" regex = { version = "1.3", features = ["std"] } +sha2 = { version = "0.9", optional = true } +attohttpc = { version = ">=0.12, <0.18", default-features = false, features = ["compress", "tls-rustls"], optional = true } +bzip2 = { version = "0.3.3", optional = true } +tar = { version = "*", optional = true } + [target.'cfg(all(unix, not(target_os = "macos")))'.build-dependencies] pkg-config = "0.3" @@ -38,7 +45,7 @@ pkg-config = "0.3" [target.'cfg(windows)'.build-dependencies] serde = "1.0" serde_derive = "1.0" -winreg = { version = "0.7", features = ["serialization-serde"]} +winreg = { version = "0.10", features = ["serialization-serde"]} [package.metadata.docs.rs] features = ["static", "zlib"] diff --git a/hdf5-sys/build.rs b/hdf5-sys/build.rs index 29c2c86a0..6698e45eb 100644 --- a/hdf5-sys/build.rs +++ b/hdf5-sys/build.rs @@ -1,3 +1,7 @@ +#![allow(clippy::must_use_candidate)] +#![allow(clippy::option_if_let_else)] + +use std::convert::TryInto; use std::env; use std::error::Error; use std::fmt::{self, Debug, Display}; @@ -20,12 +24,12 @@ pub struct Version { } impl Version { - pub fn new(major: u8, minor: u8, micro: u8) -> Self { + pub const fn new(major: u8, minor: u8, micro: 
u8) -> Self { Self { major, minor, micro } } pub fn parse(s: &str) -> Option { - let re = Regex::new(r"^(1)\.(8|10|12)\.(\d\d?)(_\d+)?(-patch\d+)?$").ok()?; + let re = Regex::new(r"^(1)\.(8|10|12|13)\.(\d\d?)(_\d+)?(-patch\d+)?$").ok()?; let captures = re.captures(s)?; Some(Self { major: captures.get(1).and_then(|c| c.as_str().parse::().ok())?, @@ -35,10 +39,7 @@ impl Version { } pub fn is_valid(self) -> bool { - self.major == 1 - && ((self.minor == 8 && self.micro >= 4) - || (self.minor == 10) - || (self.minor == 12 && self.micro == 0)) + self >= Self { major: 1, minor: 8, micro: 4 } } } @@ -85,24 +86,31 @@ impl Display for RuntimeError { #[allow(non_snake_case, non_camel_case_types)] fn get_runtime_version_single>(path: P) -> Result> { - let lib = libloading::Library::new(path.as_ref())?; - type H5open_t = unsafe extern "C" fn() -> c_int; - let H5open = unsafe { lib.get::(b"H5open")? }; - type H5get_libversion_t = unsafe extern "C" fn(*mut c_uint, *mut c_uint, *mut c_uint) -> c_int; + + let lib = unsafe { libloading::Library::new(path.as_ref()) }?; + let H5open = unsafe { lib.get::(b"H5open")? }; let H5get_libversion = unsafe { lib.get::(b"H5get_libversion")? }; let mut v: (c_uint, c_uint, c_uint) = (0, 0, 0); - unsafe { + let res = unsafe { if H5open() != 0 { Err("H5open()".into()) } else if H5get_libversion(&mut v.0, &mut v.1, &mut v.2) != 0 { Err("H5get_libversion()".into()) } else { - Ok(Version::new(v.0 as _, v.1 as _, v.2 as _)) - } - } + Ok(Version::new( + v.0.try_into().unwrap(), + v.1.try_into().unwrap(), + v.2.try_into().unwrap(), + )) + } + }; + // On macos libraries using TLS will corrupt TLS from rust. 
We delay closing + // the library until program exit by forgetting the library + std::mem::forget(lib); + res } fn validate_runtime_version(config: &Config) { @@ -124,28 +132,26 @@ fn validate_runtime_version(config: &Config) { } for link_path in &link_paths { if let Ok(paths) = fs::read_dir(link_path) { - for path in paths { - if let Ok(path) = path { - let path = path.path(); - if let Some(filename) = path.file_name() { - let filename = filename.to_str().unwrap_or(""); - if path.is_file() && libfiles.contains(&filename) { - println!("Attempting to load: {:?}", path); - match get_runtime_version_single(&path) { - Ok(version) => { - println!(" => runtime version = {:?}", version); - if version == config.header.version { - println!("HDF5 library runtime version matches headers."); - return; - } - panic!( - "Invalid HDF5 runtime version (expected: {:?}).", - config.header.version - ); - } - Err(err) => { - println!(" => {}", err); + for path in paths.flatten() { + let path = path.path(); + if let Some(filename) = path.file_name() { + let filename = filename.to_str().unwrap_or(""); + if path.is_file() && libfiles.contains(&filename) { + println!("Attempting to load: {:?}", path); + match get_runtime_version_single(&path) { + Ok(version) => { + println!(" => runtime version = {:?}", version); + if version == config.header.version { + println!("HDF5 library runtime version matches headers."); + return; } + panic!( + "Invalid HDF5 runtime version (expected: {:?}).", + config.header.version + ); + } + Err(err) => { + println!(" => {}", err); } } } @@ -200,12 +206,11 @@ impl Header { } else { panic!("Invalid H5_VERSION: {:?}", value); } - } + }; } - if !hdr.version.is_valid() { - panic!("Invalid H5_VERSION in the header: {:?}", hdr.version); - } + assert!(hdr.version.is_valid(), "Invalid H5_VERSION in the header: {:?}", hdr.version); + hdr } } @@ -232,7 +237,7 @@ pub struct LibrarySearcher { #[cfg(all(unix, not(target_os = "macos")))] mod unix { - use super::*; + use 
super::{is_inc_dir, LibrarySearcher}; pub fn find_hdf5_via_pkg_config(config: &mut LibrarySearcher) { if config.inc_dir.is_some() { @@ -480,12 +485,10 @@ impl LibrarySearcher { println!("Setting HDF5 root from environment variable:"); println!(" HDF5_DIR = {:?}", var); let root = PathBuf::from(var); - if root.is_relative() { - panic!("HDF5_DIR cannot be relative."); - } - if !root.is_dir() { - panic!("HDF5_DIR is not a directory."); - } + + assert!(!root.is_relative(), "HDF5_DIR cannot be relative."); + assert!(root.is_dir(), "HDF5_DIR is not a directory."); + config.user_provided_dir = true; config.inc_dir = Some(root.join("include")); } @@ -552,9 +555,7 @@ impl LibrarySearcher { pub fn finalize(self) -> Config { if let Some(ref inc_dir) = self.inc_dir { - if !is_inc_dir(inc_dir) { - panic!("Invalid HDF5 headers directory: {:?}", inc_dir); - } + assert!(is_inc_dir(inc_dir), "Invalid HDF5 headers directory: {:?}", inc_dir); let mut link_paths = self.link_paths; if link_paths.is_empty() { if let Some(root_dir) = inc_dir.parent() { @@ -566,12 +567,7 @@ impl LibrarySearcher { } let header = Header::parse(&inc_dir); if let Some(version) = self.version { - if header.version != version { - panic!( - "HDF5 header version mismatch: got {:?}, expected {:?}.", - header.version, version - ); - } + assert_eq!(header.version, version, "HDF5 header version mismatch",); } let config = Config { inc_dir: inc_dir.clone(), link_paths, header }; validate_runtime_version(&config); @@ -614,32 +610,41 @@ impl Config { let version = self.header.version; assert!(version >= Version::new(1, 8, 4), "required HDF5 version: >=1.8.4"); let mut vs: Vec<_> = (5..=21).map(|v| Version::new(1, 8, v)).collect(); // 1.8.[5-21] - vs.extend((0..=5).map(|v| Version::new(1, 10, v))); // 1.10.[0-5] - vs.push(Version::new(1, 12, 0)); // 1.12.0 + vs.extend((0..=8).map(|v| Version::new(1, 10, v))); // 1.10.[0-8] + vs.extend((0..=1).map(|v| Version::new(1, 12, v))); // 1.12.[0-1] + vs.extend((0..=0).map(|v| 
Version::new(1, 13, v))); // 1.13.[0-0] for v in vs.into_iter().filter(|&v| version >= v) { - println!("cargo:rustc-cfg=hdf5_{}_{}_{}", v.major, v.minor, v.micro); + println!("cargo:rustc-cfg=feature=\"{}.{}.{}\"", v.major, v.minor, v.micro); println!("cargo:version_{}_{}_{}=1", v.major, v.minor, v.micro); } if self.header.have_stdbool_h { - println!("cargo:rustc-cfg=h5_have_stdbool_h"); - println!("cargo:have_stdbool=1"); + println!("cargo:rustc-cfg=have_stdbool_h"); + // there should be no need to export have_stdbool_h downstream } if self.header.have_direct { - println!("cargo:rustc-cfg=h5_have_direct"); + println!("cargo:rustc-cfg=feature=\"have-direct\""); println!("cargo:have_direct=1"); } if self.header.have_parallel { - println!("cargo:rustc-cfg=h5_have_parallel"); + println!("cargo:rustc-cfg=feature=\"have-parallel\""); println!("cargo:have_parallel=1"); } if self.header.have_threadsafe { - println!("cargo:rustc-cfg=h5_have_threadsafe"); + println!("cargo:rustc-cfg=feature=\"have-threadsafe\""); println!("cargo:have_threadsafe=1"); } } } fn main() { + #[cfg(feature = "conda")] + if env::var("CARGO_FEATURE_CONDA").is_ok() { + println!("cargo:rerun-if-changed=build.rs"); + println!("cargo:rerun-if-env-changed=CARGO_FEATURE_CONDA"); + conda_dl::conda_static(); + return; + } + if feature_enabled("STATIC") && std::env::var_os("HDF5_DIR").is_none() { get_build_and_emit(); } else { @@ -683,3 +688,173 @@ fn get_build_and_emit() { let config = Config { header, inc_dir: "".into(), link_paths: Vec::new() }; config.emit_cfg_flags(); } + +/// Download HDF5 binary builds from Conda and statically link to them. 
+#[cfg(feature = "conda")] +mod conda_dl { + use super::*; + + use std::io::{self, Read}; + use std::path::PathBuf; + use std::time::Duration; + + use bzip2::read::BzDecoder; + use sha2::Digest; + use tar::Archive; + + #[cfg(target_os = "linux")] + mod conda { + pub const INC_PATH: &'static str = "include"; + pub const LIB_PATH: &'static str = "lib"; + + pub const DLS: &[(&'static str, &'static str, &'static str)] = &[ + ( + "hdf5-1.12.0-hc3cf35f_0.tar.bz2", + "https://anaconda.org/anaconda/hdf5/1.12.0/download/linux-64/hdf5-1.12.0-hc3cf35f_0.tar.bz2", + "1681beaec0bcbd0025731735902744ef4db6a3084d9d325d814ec9758a2da31c", + ), + ( + "zlib-1.2.11-hfbfcf68_1.tar.bz2", + "https://repo.continuum.io/pkgs/main/linux-64/zlib-1.2.11-hfbfcf68_1.tar.bz2", + "97c9bd774d08dcc6383c297e4378e1486061e7af426f3e62fda449454de7defb", + ), + ]; + } + + #[cfg(all(target_os = "macos", target_arch = "x86_64"))] + mod conda { + pub const INC_PATH: &'static str = "include"; + pub const LIB_PATH: &'static str = "lib"; + + pub const DLS: &[(&'static str, &'static str, &'static str)] = &[ + ( + "hdf5-1.12.0-h964e04d_0.tar.bz2", + "https://anaconda.org/anaconda/hdf5/1.12.0/download/osx-64/hdf5-1.12.0-h964e04d_0.tar.bz2", + "1d6e4058ab8ad0ea70dd755b62310a956f9a026ab6a3336ff4bd605c806d7c60", + ), + ( + "zlib-1.2.11-hf3cbc9b_2.tar.bz2", + "https://repo.continuum.io/pkgs/main/osx-64/zlib-1.2.11-hf3cbc9b_2.tar.bz2", + "a82e1e2900b095f50ab7b126f31a6fe5caab0f1cb31a7bba45451df47aba0dd6", + ), + ]; + } + + #[cfg(all(target_os = "macos", target_arch = "aarch64"))] + mod conda { + pub const INC_PATH: &'static str = "include"; + pub const LIB_PATH: &'static str = "lib"; + + pub const DLS: &[(&'static str, &'static str, &'static str)] = &[ + ( + "hdf5-1.12.1-nompi_had0e5e0_101.tar.bz2", + "https://anaconda.org/conda-forge/hdf5/1.12.1/download/osx-arm64/hdf5-1.12.1-nompi_had0e5e0_101.tar.bz2", + "479a943c508ea796b708d4eaaa6bf03b5168c877a454190d67a75f28a44a2461", + ), + ( + "zlib-1.2.11-hee7b306_1013.tar.bz2", 
+ "https://anaconda.org/conda-forge/zlib/1.2.11/download/osx-arm64/zlib-1.2.11-hee7b306_1013.tar.bz2", + "04cbcc43aaf9b1ba31eddb0a93adb1a025156542fd4ba2b7b66b4ba4f4126d50", + ), + ]; + } + + #[cfg(target_os = "windows")] + mod conda { + pub const INC_PATH: &'static str = "Library\\include"; + pub const LIB_PATH: &'static str = "Library\\lib"; + + pub const DLS: &[(&'static str, &'static str, &'static str)] = &[ + ( + "hdf5-1.12.0-h1756f20_0.tar.bz2", + "https://anaconda.org/anaconda/hdf5/1.12.0/download/win-64/hdf5-1.12.0-h1756f20_0.tar.bz2", + "747997999fc56b5c878cc8ee224b486266ac1c77ddf8407529a91eb851abf3d7", + ), + ( + "zlib-1.2.11-vc14h1cdd9ab_1.tar.bz2", + "https://repo.continuum.io/pkgs/main/win-64/zlib-1.2.11-vc14h1cdd9ab_1.tar.bz2", + "cf7f895c0ff7f238ed02182a89864386bfc198674dc280002ff4a47ae0993b0e", + ), + ]; + } + + fn download(uri: &str, filename: &str, out_dir: &Path) { + let out = PathBuf::from(out_dir.join(filename)); + + // Download the tarball. + let f = fs::File::create(&out).unwrap(); + let writer = io::BufWriter::new(f); + + let req = attohttpc::get(uri).read_timeout(Duration::new(90, 0)); + + let response = req.send().unwrap(); + + if !response.is_success() { + panic!("Unexpected response code {:?} for {}", response.status(), uri); + } + + response.write_to(writer).unwrap(); + } + + fn calc_sha256(path: &Path) -> String { + let mut f = io::BufReader::new(fs::File::open(path).unwrap()); + let mut buf = Vec::new(); + f.read_to_end(&mut buf).unwrap(); + + let digest = sha2::Sha256::digest(&buf); + format!("{:x}", digest) + } + + fn extract, P2: AsRef>(archive_path: P, extract_to: P2) { + let file = fs::File::open(archive_path).unwrap(); + let unzipped = BzDecoder::new(file); + let mut a = Archive::new(unzipped); + a.unpack(extract_to).unwrap(); + } + + pub fn conda_static() { + let out_dir = PathBuf::from(env::var("OUT_DIR").unwrap()); + + for (archive, uri, sha256) in conda::DLS { + let archive_path = out_dir.join(archive); + if 
archive_path.exists() && calc_sha256(&archive_path) == *sha256 { + println!("Use existings archive"); + } else { + println!("Download archive"); + download(uri, archive, &out_dir); + extract(&archive_path, &out_dir); + + let sum = calc_sha256(&archive_path); + if sum != *sha256 { + panic!( + "check sum of downloaded archive {} is incorrect: sha256sum={}", + archive, sum + ); + } + } + } + + println!("cargo:rustc-link-search={}", out_dir.join(conda::LIB_PATH).display()); + + #[cfg(target_os = "windows")] + { + println!("cargo:rustc-link-lib=static=zlibstatic"); + println!("cargo:rustc-link-lib=static=libhdf5"); + } + + #[cfg(target_os = "linux")] + println!("cargo:rustc-link-lib=static=hdf5"); + + #[cfg(target_os = "macos")] + println!("cargo:rustc-link-lib=static=hdf5"); + + #[cfg(not(target_os = "windows"))] + println!("cargo:rustc-link-lib=static=z"); + + let inc_dir = out_dir.join(conda::INC_PATH); + + let header = Header::parse(&inc_dir); + let cfg = Config { inc_dir, link_paths: Vec::new(), header }; + cfg.emit_cfg_flags(); + } +} diff --git a/hdf5-sys/src/h5.rs b/hdf5-sys/src/h5.rs index 7bcf25744..ba6bc5d48 100644 --- a/hdf5-sys/src/h5.rs +++ b/hdf5-sys/src/h5.rs @@ -1,3 +1,4 @@ +//! 
General purpose library functions use std::mem; pub use self::H5_index_t::*; @@ -7,13 +8,22 @@ use crate::internal_prelude::*; pub type herr_t = c_int; pub type htri_t = c_int; + +#[cfg(not(feature = "1.13.0"))] pub type hsize_t = c_ulonglong; +#[cfg(feature = "1.13.0")] +pub type hsize_t = u64; + +#[cfg(not(feature = "1.13.0"))] pub type hssize_t = c_longlong; -pub type haddr_t = uint64_t; +#[cfg(feature = "1.13.0")] +pub type hssize_t = i64; -#[cfg(all(hdf5_1_10_0, h5_have_stdbool_h))] +pub type haddr_t = u64; + +#[cfg(any(all(feature = "1.10.0", have_stdbool_h), feature = "1.13.0"))] pub type hbool_t = u8; -#[cfg(any(not(hdf5_1_10_0), not(h5_have_stdbool_h)))] +#[cfg(not(any(all(feature = "1.10.0", have_stdbool_h), feature = "1.13.0")))] pub type hbool_t = c_uint; #[repr(C)] @@ -70,18 +80,47 @@ extern "C" { pub fn H5check_version(majnum: c_uint, minnum: c_uint, relnum: c_uint) -> herr_t; } -#[cfg(hdf5_1_8_13)] +#[cfg(feature = "1.8.13")] extern "C" { pub fn H5free_memory(mem: *mut c_void) -> herr_t; } -#[cfg(hdf5_1_8_15)] +#[cfg(feature = "1.8.15")] extern "C" { pub fn H5allocate_memory(size: size_t, clear: hbool_t) -> *mut c_void; pub fn H5resize_memory(mem: *mut c_void, size: size_t) -> *mut c_void; } -#[cfg(hdf5_1_8_16)] +#[cfg(feature = "1.8.16")] extern "C" { pub fn H5is_library_threadsafe(is_ts: *mut hbool_t) -> herr_t; } + +#[cfg(any(all(feature = "1.10.7", not(feature = "1.12.0")), feature = "1.12.1"))] +#[repr(C)] +pub struct H5_alloc_stats_t { + total_alloc_bytes: c_ulonglong, + curr_alloc_bytes: size_t, + peak_alloc_bytes: size_t, + max_block_size: size_t, + total_alloc_blocks_count: size_t, + curr_alloc_blocks_count: size_t, + peak_alloc_blocks_count: size_t, +} + +#[cfg(any(all(feature = "1.10.7", not(feature = "1.12.0")), feature = "1.12.1"))] +extern "C" { + pub fn H5get_alloc_stats(stats: *mut H5_alloc_stats_t) -> herr_t; + pub fn H5get_free_list_sizes( + reg_size: *mut size_t, arr_size: *mut size_t, blk_size: *mut size_t, fac_size: *mut size_t, 
+ ) -> herr_t; +} + +#[cfg(feature = "1.13.0")] +type H5_atclose_func_t = Option; + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5atclose(func: H5_atclose_func_t, ctx: *mut c_void) -> herr_t; + pub fn H5is_library_terminating(is_terminating: *mut hbool_t) -> herr_t; +} diff --git a/hdf5-sys/src/h5a.rs b/hdf5-sys/src/h5a.rs index 75c67afaa..ec7ec8f6f 100644 --- a/hdf5-sys/src/h5a.rs +++ b/hdf5-sys/src/h5a.rs @@ -1,8 +1,13 @@ +//! Creating and manipulating HDF5 attributes use std::mem; use crate::internal_prelude::*; use crate::h5o::H5O_msg_crt_idx_t; +pub use { + H5A_operator2_t as H5A_operator_t, H5A_operator2_t as H5A_operator_r, H5Acreate2 as H5Acreate, + H5Aiterate2 as H5Aiterate, +}; #[repr(C)] #[derive(Copy, Clone)] @@ -19,6 +24,15 @@ impl Default for H5A_info_t { } } +#[deprecated(note = "deprecated in HDF5 1.8.0, use H5A_operator2_t")] +pub type H5A_operator1_t = Option< + extern "C" fn( + location_id: hid_t, + attr_name: *const c_char, + operator_data: *mut c_void, + ) -> herr_t, +>; + pub type H5A_operator2_t = Option< extern "C" fn( location_id: hid_t, @@ -92,4 +106,77 @@ extern "C" { pub fn H5Aexists_by_name( obj_id: hid_t, obj_name: *const c_char, attr_name: *const c_char, lapl_id: hid_t, ) -> htri_t; + + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Aget_info")] + pub fn H5Aget_num_attrs(loc_id: hid_t) -> c_int; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Aopen_by_idx")] + pub fn H5Aopen_idx(loc_id: hid_t, idx: c_uint) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Aopen_by_name")] + pub fn H5Aopen_name(loc_id: hid_t, name: *const c_char) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Acreate2")] + pub fn H5Acreate1( + loc_id: hid_t, name: *const c_char, type_id: hid_t, space_id: hid_t, acpl_id: hid_t, + ) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Aiterate2")] + pub fn H5Aiterate1( + loc_id: hid_t, attr_num: *mut c_uint, op: H5A_operator1_t, op_data: *mut c_void, + ) -> 
herr_t; +} + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Aclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, attr_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Acreate_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + attr_name: *const c_char, type_id: hid_t, space_id: hid_t, acpl_id: hid_t, aapl_id: hid_t, + es_id: hid_t, + ) -> hid_t; + pub fn H5Acreate_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + obj_name: *const c_char, attr_name: *const c_char, type_id: hid_t, space_id: hid_t, + acpl_id: hid_t, aapl_id: hid_t, lapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Aexists_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, obj_id: hid_t, + attr_name: *const c_char, exists: *mut hbool_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Aexists_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + obj_name: *const c_char, attr_name: *const c_char, exists: *mut hbool_t, lapl_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Aopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, obj_id: hid_t, + attr_name: *const c_char, aapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Aopen_by_idx_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + obj_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: c_ulong, + aapl_id: hid_t, lapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Aopen_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + obj_name: *const c_char, attr_name: *const c_char, aapl_id: hid_t, lapl_id: hid_t, + es_id: hid_t, + ) -> hid_t; + pub fn H5Aread_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, attr_id: hid_t, + dtype_id: hid_t, buf: *mut c_void, es_id: hid_t, + ) -> herr_t; + 
pub fn H5Arename_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + old_name: *const c_char, new_name: *const c_char, es_id: hid_t, + ) -> herr_t; + pub fn H5Arename_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + obj_name: *const c_char, old_attr_name: *const c_char, new_attr_name: *const c_char, + lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Awrite_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, attr_id: hid_t, + type_id: hid_t, buf: *const c_void, es_id: hid_t, + ) -> herr_t; } diff --git a/hdf5-sys/src/h5ac.rs b/hdf5-sys/src/h5ac.rs index 846a3339c..af3ef0f26 100644 --- a/hdf5-sys/src/h5ac.rs +++ b/hdf5-sys/src/h5ac.rs @@ -1,3 +1,4 @@ +//! Cache functions use std::mem; use crate::internal_prelude::*; @@ -44,9 +45,9 @@ pub struct H5AC_cache_config_t { pub epochs_before_eviction: c_int, pub apply_empty_reserve: hbool_t, pub empty_reserve: c_double, - #[cfg(not(hdf5_1_10_0))] + #[cfg(not(feature = "1.10.0"))] pub dirty_bytes_threshold: c_int, - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub dirty_bytes_threshold: size_t, pub metadata_write_strategy: c_int, } @@ -57,7 +58,7 @@ impl Default for H5AC_cache_config_t { } } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] mod hdf5_1_10_1 { use super::*; @@ -77,5 +78,5 @@ mod hdf5_1_10_1 { } } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] pub use self::hdf5_1_10_1::*; diff --git a/hdf5-sys/src/h5c.rs b/hdf5-sys/src/h5c.rs index 7eb8dc37d..8df09e70b 100644 --- a/hdf5-sys/src/h5c.rs +++ b/hdf5-sys/src/h5c.rs @@ -1,3 +1,4 @@ +//! Cache functionality pub use self::H5C_cache_decr_mode::*; pub use self::H5C_cache_flash_incr_mode::*; pub use self::H5C_cache_incr_mode::*; diff --git a/hdf5-sys/src/h5d.rs b/hdf5-sys/src/h5d.rs index 7d6d6383a..d279367c8 100644 --- a/hdf5-sys/src/h5d.rs +++ b/hdf5-sys/src/h5d.rs @@ -1,3 +1,4 @@ +//! 
Creating and manipulating scientific datasets pub use self::H5D_alloc_time_t::*; pub use self::H5D_fill_time_t::*; pub use self::H5D_fill_value_t::*; @@ -6,6 +7,7 @@ pub use self::H5D_mpio_actual_chunk_opt_mode_t::*; pub use self::H5D_mpio_actual_io_mode_t::*; pub use self::H5D_mpio_no_collective_cause_t::*; pub use self::H5D_space_status_t::*; +pub use {H5Dcreate2 as H5D_create, H5Dopen2 as H5Dopen}; use crate::internal_prelude::*; @@ -14,7 +16,7 @@ pub const H5D_CHUNK_CACHE_NBYTES_DEFAULT: size_t = !0; pub const H5D_CHUNK_CACHE_W0_DEFAULT: c_float = -1.0; -#[cfg(not(hdf5_1_10_0))] +#[cfg(not(feature = "1.10.0"))] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5D_layout_t { @@ -27,7 +29,7 @@ pub enum H5D_layout_t { impl Default for H5D_layout_t { fn default() -> Self { - H5D_layout_t::H5D_CONTIGUOUS + Self::H5D_CONTIGUOUS } } @@ -48,7 +50,7 @@ pub enum H5D_alloc_time_t { impl Default for H5D_alloc_time_t { fn default() -> Self { - H5D_alloc_time_t::H5D_ALLOC_TIME_DEFAULT + Self::H5D_ALLOC_TIME_DEFAULT } } @@ -72,7 +74,7 @@ pub enum H5D_fill_time_t { impl Default for H5D_fill_time_t { fn default() -> Self { - H5D_fill_time_t::H5D_FILL_TIME_IFSET + Self::H5D_FILL_TIME_IFSET } } @@ -87,7 +89,7 @@ pub enum H5D_fill_value_t { impl Default for H5D_fill_value_t { fn default() -> Self { - H5D_fill_value_t::H5D_FILL_VALUE_DEFAULT + Self::H5D_FILL_VALUE_DEFAULT } } @@ -132,7 +134,7 @@ pub type H5D_operator_t = Option< ) -> herr_t, >; -#[cfg(hdf5_1_8_11)] +#[cfg(feature = "1.8.11")] pub type H5D_scatter_func_t = Option< extern "C" fn( src_buf: *mut *const c_void, @@ -140,7 +142,7 @@ pub type H5D_scatter_func_t = Option< op_data: *mut c_void, ) -> herr_t, >; -#[cfg(hdf5_1_8_11)] +#[cfg(feature = "1.8.11")] pub type H5D_gather_func_t = Option< extern "C" fn( dst_buf: *const c_void, @@ -178,6 +180,7 @@ extern "C" { buf: *mut c_void, type_id: hid_t, space_id: hid_t, op: H5D_operator_t, operator_data: *mut c_void, ) -> herr_t; + #[cfg_attr(feature = 
"1.12.0", deprecated(note = "deprecated in HDF5 1.12.0, use H5Treclaim"))] pub fn H5Dvlen_reclaim( type_id: hid_t, space_id: hid_t, plist_id: hid_t, buf: *mut c_void, ) -> herr_t; @@ -189,9 +192,16 @@ extern "C" { ) -> herr_t; pub fn H5Dset_extent(dset_id: hid_t, size: *const hsize_t) -> herr_t; pub fn H5Ddebug(dset_id: hid_t) -> herr_t; + + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Dcreate2")] + pub fn H5Dcreate1( + file_id: hid_t, name: *const c_char, type_id: hid_t, space_id: hid_t, dcpl_id: hid_t, + ) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Dopen2")] + pub fn H5Dopen1(file_id: hid_t, name: *const c_char) -> hid_t; } -#[cfg(hdf5_1_8_11)] +#[cfg(feature = "1.8.11")] extern "C" { pub fn H5Dscatter( op: H5D_scatter_func_t, op_data: *mut c_void, type_id: hid_t, dst_space_id: hid_t, @@ -203,7 +213,7 @@ extern "C" { ) -> herr_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] mod hdf5_1_10_0 { use super::*; @@ -228,7 +238,7 @@ mod hdf5_1_10_0 { impl Default for H5D_vds_view_t { fn default() -> Self { - H5D_vds_view_t::H5D_VDS_LAST_AVAILABLE + Self::H5D_VDS_LAST_AVAILABLE } } @@ -250,10 +260,10 @@ mod hdf5_1_10_0 { } } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] pub use self::hdf5_1_10_0::*; -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] extern "C" { pub fn H5Dread_chunk( dset_id: hid_t, dxpl_id: hid_t, offset: *const hsize_t, filters: *mut u32, buf: *mut c_void, @@ -264,7 +274,7 @@ extern "C" { ) -> herr_t; } -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] extern "C" { pub fn H5Dget_chunk_info( dset_id: hid_t, fspace_id: hid_t, index: hsize_t, offset: *mut hsize_t, @@ -276,3 +286,52 @@ extern "C" { ) -> herr_t; pub fn H5Dget_num_chunks(dset_id: hid_t, fspace_id: hid_t, nchunks: *mut hsize_t) -> herr_t; } + +#[cfg(feature = "1.13.0")] +pub type H5D_chunk_iter_op_t = Option< + extern "C" fn( + offset: *const hsize_t, + filter_mask: u32, + addr: haddr_t, + nbytes: u32, + op_data: *mut c_void, + ) -> c_int, +>; + 
+#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Dchunk_iter( + dset_id: hid_t, dxpl: hid_t, cb: H5D_chunk_iter_op_t, op_data: *mut c_void, + ) -> herr_t; + pub fn H5Dclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Dcreate_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, type_id: hid_t, space_id: hid_t, lcpl_id: hid_t, dcpl_id: hid_t, + dapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Dget_space_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + es_id: hid_t, + ) -> hid_t; + pub fn H5Dopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, dapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Dread_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + mem_type_id: hid_t, mem_space_id: hid_t, file_space_id: hid_t, dxpl_id: hid_t, + buf: *mut c_void, es_id: hid_t, + ) -> herr_t; + pub fn H5Dset_extent_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + size: *mut c_ulong, es_id: hid_t, + ) -> herr_t; + pub fn H5Dwrite_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + mem_type_id: hid_t, mem_space_id: hid_t, file_space_id: hid_t, dxpl_id: hid_t, + buf: *const c_void, es_id: hid_t, + ) -> herr_t; +} diff --git a/hdf5-sys/src/h5e.rs b/hdf5-sys/src/h5e.rs index f656aee84..1ff4d8b9e 100644 --- a/hdf5-sys/src/h5e.rs +++ b/hdf5-sys/src/h5e.rs @@ -1,7 +1,13 @@ +//! 
Functions for handling errors that occur within HDF5 use std::mem; pub use self::H5E_direction_t::*; pub use self::H5E_type_t::*; +pub use { + H5E_auto2_t as H5E_auto_t, H5E_error2_t as H5E_error_t, H5E_walk2_t as H5E_walk_t, + H5Eclear2 as H5Eclear, H5Eget_auto2 as H5Eget_auto, H5Eprint2 as H5Eprint, H5Epush2 as H5Epush, + H5Eset_auto2 as H5Eset_auto, H5Ewalk2 as H5Ewalk, +}; use crate::internal_prelude::*; @@ -14,6 +20,21 @@ pub enum H5E_type_t { H5E_MINOR = 1, } +pub type H5E_major_t = hid_t; +pub type H5E_minor_t = hid_t; + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +#[deprecated(note = "deprecated in HDF5 1.8.0, use H5E_error2_t")] +pub struct H5E_error1_t { + maj_num: H5E_major_t, + min_num: H5E_minor_t, + func_name: *const c_char, + file_name: *const c_char, + line: c_uint, + desc: *const c_char, +} + #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5E_error2_t { @@ -39,6 +60,13 @@ pub enum H5E_direction_t { H5E_WALK_DOWNWARD = 1, } +#[deprecated(note = "deprecated in HDF5 1.8.0, use H5E_walk2_t")] +pub type H5E_walk1_t = Option< + unsafe extern "C" fn(n: c_int, err_desc: *mut H5E_error1_t, client_data: *mut c_void) -> herr_t, +>; +#[deprecated(note = "deprecated in HDF5 1.8.0, use H5E_auto2_t")] +pub type H5E_auto1_t = Option herr_t>; + pub type H5E_walk2_t = Option< unsafe extern "C" fn( n: c_uint, @@ -80,6 +108,33 @@ extern "C" { msg_id: hid_t, type_: *mut H5E_type_t, msg: *mut c_char, size: size_t, ) -> ssize_t; pub fn H5Eget_num(error_stack_id: hid_t) -> ssize_t; + + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Epush2")] + pub fn H5Epush1( + file: *const c_char, func: *const c_char, line: c_uint, maj: H5E_major_t, min: H5E_minor_t, + str_: *const c_char, + ) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Eprint2")] + pub fn H5Eprint1(stream: *mut FILE) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Ewalk2")] + pub fn H5Ewalk1( + direction: H5E_direction_t, func: H5E_walk1_t, client_data: *mut c_void, + ) 
-> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Eget_auto2")] + pub fn H5Eget_auto1(func: *mut H5E_auto1_t, client_data: *mut *mut c_void) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Eset_auto2")] + pub fn H5Eset_auto1(func: H5E_auto1_t, client_data: *mut c_void) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Eclear2")] + pub fn H5Eclear1() -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Eget_msg")] + pub fn H5Eget_major(maj: H5E_major_t) -> *mut c_char; + #[deprecated(note = "deprecated in HDF5 1.8.0")] + pub fn H5Eget_minor(min: H5E_minor_t) -> *mut c_char; + + #[cfg(feature = "1.13.0")] + pub fn H5Eappend_stack( + dst_stack_id: hid_t, src_stack_id: hid_t, close_source_stack: hbool_t, + ) -> herr_t; } pub use self::globals::*; @@ -111,6 +166,7 @@ mod globals { extern_static!(H5E_RS, H5E_RS_g); extern_static!(H5E_HEAP, H5E_HEAP_g); extern_static!(H5E_OHDR, H5E_OHDR_g); + #[cfg(not(feature = "1.13.0"))] extern_static!(H5E_ATOM, H5E_ATOM_g); extern_static!(H5E_ATTR, H5E_ATTR_g); extern_static!(H5E_NONE_MAJOR, H5E_NONE_MAJOR_g); @@ -186,6 +242,7 @@ mod globals { extern_static!(H5E_BADFILE, H5E_BADFILE_g); extern_static!(H5E_TRUNCATED, H5E_TRUNCATED_g); extern_static!(H5E_MOUNT, H5E_MOUNT_g); + #[cfg(not(feature = "1.13.0"))] extern_static!(H5E_BADATOM, H5E_BADATOM_g); extern_static!(H5E_BADGROUP, H5E_BADGROUP_g); extern_static!(H5E_CANTREGISTER, H5E_CANTREGISTER_g); @@ -239,6 +296,28 @@ mod globals { extern_static!(H5E_CANTREMOVE, H5E_CANTREMOVE_g); extern_static!(H5E_CANTCONVERT, H5E_CANTCONVERT_g); extern_static!(H5E_BADSIZE, H5E_BADSIZE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_CANTLOCKFILE, H5E_CANTLOCKFILE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_CANTUNLOCKFILE, H5E_CANTUNLOCKFILE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_LIB, H5E_LIB_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_BADID, H5E_BADID_g); + #[cfg(feature = "1.13.0")] + 
extern_static!(H5E_CANTCANCEL, H5E_CANTCANCEL_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTFIND, H5E_CANTFIND_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTPUT, H5E_CANTPUT_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTWAIT, H5E_CANTWAIT_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_EVENTSET, H5E_EVENTSET_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_ID, H5E_ID_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_UNMOUNT, H5E_UNMOUNT_g); } #[cfg(all(target_env = "msvc", not(feature = "static")))] @@ -269,6 +348,7 @@ mod globals { extern_static!(H5E_RS, __imp_H5E_RS_g); extern_static!(H5E_HEAP, __imp_H5E_HEAP_g); extern_static!(H5E_OHDR, __imp_H5E_OHDR_g); + #[cfg(not(feature = "1.13.0"))] extern_static!(H5E_ATOM, __imp_H5E_ATOM_g); extern_static!(H5E_ATTR, __imp_H5E_ATTR_g); extern_static!(H5E_NONE_MAJOR, __imp_H5E_NONE_MAJOR_g); @@ -344,6 +424,7 @@ mod globals { extern_static!(H5E_BADFILE, __imp_H5E_BADFILE_g); extern_static!(H5E_TRUNCATED, __imp_H5E_TRUNCATED_g); extern_static!(H5E_MOUNT, __imp_H5E_MOUNT_g); + #[cfg(not(feature = "1.13.0"))] extern_static!(H5E_BADATOM, __imp_H5E_BADATOM_g); extern_static!(H5E_BADGROUP, __imp_H5E_BADGROUP_g); extern_static!(H5E_CANTREGISTER, __imp_H5E_CANTREGISTER_g); @@ -397,4 +478,26 @@ mod globals { extern_static!(H5E_CANTREMOVE, __imp_H5E_CANTREMOVE_g); extern_static!(H5E_CANTCONVERT, __imp_H5E_CANTCONVERT_g); extern_static!(H5E_BADSIZE, __imp_H5E_BADSIZE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_CANTLOCKFILE, __imp_H5E_CANTLOCKFILE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_CANTUNLOCKFILE, __imp_H5E_CANTUNLOCKFILE_g); + #[cfg(feature = "1.12.1")] + extern_static!(H5E_LIB, __imp_H5E_LIB_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_BADID, __imp_H5E_BADID_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTCANCEL, __imp_H5E_CANTCANCEL_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTFIND, __imp_H5E_CANTFIND_g); + #[cfg(feature = 
"1.13.0")] + extern_static!(H5E_CANTPUT, __imp_H5E_CANTPUT_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_CANTWAIT, __imp_H5E_CANTWAIT_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_EVENTSET, __imp_H5E_EVENTSET_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_ID, __imp_H5E_ID_g); + #[cfg(feature = "1.13.0")] + extern_static!(H5E_UNMOUNT, __imp_H5E_UNMOUNT_g); } diff --git a/hdf5-sys/src/h5es.rs b/hdf5-sys/src/h5es.rs new file mode 100644 index 000000000..8ea81ad11 --- /dev/null +++ b/hdf5-sys/src/h5es.rs @@ -0,0 +1,87 @@ +//! Event set module +use crate::internal_prelude::*; + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct H5ES_err_info_t { + api_cname: *mut c_char, + api_args: *mut c_char, + + app_file_name: *mut c_char, + app_func_name: *mut c_char, + app_line_num: c_uint, + + op_ins_count: u64, + op_ins_ts: u64, + op_exec_ts: u64, + op_exec_time: u64, + + err_stack_id: hid_t, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct H5ES_op_info_t { + api_cname: *const c_char, + api_args: *mut c_char, + + app_file_name: *const c_char, + app_func_name: *const c_char, + app_line_num: c_uint, + + op_ins_count: u64, + op_ins_ts: u64, + op_exec_ts: u64, + op_exec_time: u64, +} + +#[repr(C)] +#[derive(Debug, PartialEq, Eq, Copy, Clone)] +pub enum H5ES_status_t { + H5ES_STATUS_IN_PROGRESS, + H5ES_STATUS_SUCCEED, + H5ES_STATUS_CANCELED, + H5ES_STATUS_FAIL, +} + +pub type H5ES_event_complete_func_t = Option< + extern "C" fn( + op_info: *mut H5ES_op_info_t, + status: H5ES_status_t, + err_stack: hid_t, + ctx: *mut c_void, + ) -> c_int, +>; + +pub type H5ES_event_insert_func_t = + Option c_int>; + +extern "C" { + pub fn H5ESinsert_request(es_id: hid_t, connector_id: hid_t, request: *mut c_void) -> herr_t; +} + +extern "C" { + pub fn H5EScancel( + es_id: hid_t, num_not_canceled: *mut size_t, err_occured: *mut hbool_t, + ) -> herr_t; + pub fn H5ESclose(es_id: hid_t) -> herr_t; + pub fn H5EScreate() -> hid_t; + pub fn 
H5ESfree_err_info(num_err_info: size_t, err_info: *mut H5ES_err_info_t) -> herr_t; + pub fn H5ESget_count(es_id: hid_t, count: *mut size_t) -> herr_t; + pub fn H5ESget_err_count(es_id: hid_t, num_errs: *mut size_t) -> herr_t; + pub fn H5ESget_err_info( + es_id: hid_t, num_err_info: size_t, err_info: *mut H5ES_err_info_t, + err_cleared: *mut size_t, + ) -> herr_t; + pub fn H5ESget_err_status(es_id: hid_t, err_occured: *mut hbool_t) -> herr_t; + pub fn H5ESget_op_counter(es_id: hid_t, counter: *mut u64) -> herr_t; + pub fn H5ESregister_complete_func( + es_id: hid_t, func: H5ES_event_complete_func_t, ctx: *mut c_void, + ) -> herr_t; + pub fn H5ESregister_insert_func( + es_id: hid_t, func: H5ES_event_insert_func_t, ctx: *mut c_void, + ) -> herr_t; + pub fn H5ESwait( + es_id: hid_t, timeout: u64, num_in_progress: *mut size_t, err_occured: *mut hbool_t, + ) -> herr_t; +} diff --git a/hdf5-sys/src/h5f.rs b/hdf5-sys/src/h5f.rs index 4aa9092ce..5f9096436 100644 --- a/hdf5-sys/src/h5f.rs +++ b/hdf5-sys/src/h5f.rs @@ -1,15 +1,26 @@ +//! 
Creating and manipulating HDF5 files use std::mem; pub use self::H5F_close_degree_t::*; pub use self::H5F_libver_t::*; pub use self::H5F_mem_t::*; pub use self::H5F_scope_t::*; +#[cfg(not(feature = "1.10.0"))] +pub use { + H5F_info1_t as H5F_info_t, H5F_info1_t__sohm as H5F_info_t__sohm, H5Fget_info1 as H5Fget_info, +}; +#[cfg(feature = "1.10.0")] +pub use { + H5F_info2_t as H5F_info_t, H5F_info2_t__free as H5F_info_t__free, + H5F_info2_t__sohm as H5F_info_t__sohm, H5F_info2_t__super as H5F_info_t__super, + H5Fget_info2 as H5Fget_info, +}; use crate::internal_prelude::*; use crate::h5ac::H5AC_cache_config_t; -#[cfg_attr(hdf5_1_10_0, deprecated(note = "deprecated in HDF5 1.10.0"))] +#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0"))] pub const H5F_ACC_DEBUG: c_uint = 0x0000; /* these flags call H5check() in the C library */ @@ -53,33 +64,33 @@ pub enum H5F_close_degree_t { impl Default for H5F_close_degree_t { fn default() -> Self { - H5F_close_degree_t::H5F_CLOSE_DEFAULT + Self::H5F_CLOSE_DEFAULT } } -#[cfg_attr(hdf5_1_10_0, deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] +#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] #[repr(C)] #[derive(Debug, Copy, Clone)] -pub struct H5F_info_t { +pub struct H5F_info1_t { pub super_ext_size: hsize_t, - pub sohm: H5F_info_t__sohm, + pub sohm: H5F_info1_t__sohm, } -impl Default for H5F_info_t { +impl Default for H5F_info1_t { fn default() -> Self { unsafe { mem::zeroed() } } } -#[cfg_attr(hdf5_1_10_0, deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] +#[cfg_attr(feature = "1.10.0", deprecated(note = "deprecated in HDF5 1.10.0, use H5F_info2_t"))] #[repr(C)] #[derive(Debug, Copy, Clone)] -pub struct H5F_info_t__sohm { +pub struct H5F_info1_t__sohm { pub hdr_size: hsize_t, pub msgs_info: H5_ih_info_t, } -impl Default for H5F_info_t__sohm { +impl Default for H5F_info1_t__sohm { fn default() -> Self { unsafe { mem::zeroed() 
} } @@ -99,7 +110,7 @@ pub enum H5F_mem_t { H5FD_MEM_NTYPES = 7, } -#[cfg(not(hdf5_1_10_2))] +#[cfg(not(feature = "1.10.2"))] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_libver_t { @@ -107,7 +118,7 @@ pub enum H5F_libver_t { H5F_LIBVER_LATEST = 1, } -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5F_libver_t { @@ -115,10 +126,14 @@ pub enum H5F_libver_t { H5F_LIBVER_EARLIEST = 0, H5F_LIBVER_V18 = 1, H5F_LIBVER_V110 = 2, - H5F_LIBVER_NBOUNDS = 3, + #[cfg(feature = "1.12.0")] + H5F_LIBVER_V112 = 3, + #[cfg(feature = "1.13.0")] + H5F_LIBVER_V114 = 4, + H5F_LIBVER_NBOUNDS, } -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] pub const H5F_LIBVER_LATEST: H5F_libver_t = H5F_LIBVER_V110; impl Default for H5F_libver_t { @@ -127,11 +142,6 @@ impl Default for H5F_libver_t { } } -#[cfg(not(hdf5_1_10_0))] -extern "C" { - pub fn H5Fget_info(obj_id: hid_t, bh_info: *mut H5F_info_t) -> herr_t; -} - extern "C" { #[cfg_attr( hdf5_1_10_2, @@ -139,7 +149,7 @@ extern "C" { )] pub fn H5Fset_latest_format(file_id: hid_t, latest_format: hbool_t) -> herr_t; pub fn H5Fis_hdf5(filename: *const c_char) -> htri_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Fis_accessible(container_name: *const c_char, fapl_id: hid_t) -> htri_t; pub fn H5Fcreate( filename: *const c_char, flags: c_uint, create_plist: hid_t, access_plist: hid_t, @@ -148,12 +158,12 @@ extern "C" { pub fn H5Freopen(file_id: hid_t) -> hid_t; pub fn H5Fflush(object_id: hid_t, scope: H5F_scope_t) -> herr_t; pub fn H5Fclose(file_id: hid_t) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Fdelete(filename: *const c_char, fapl_id: hid_t) -> herr_t; pub fn H5Fget_create_plist(file_id: hid_t) -> hid_t; pub fn H5Fget_access_plist(file_id: hid_t) -> hid_t; pub fn H5Fget_intent(file_id: hid_t, intent: *mut c_uint) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn 
H5Fget_fileno(file_id: hid_t, fileno: *mut c_ulong) -> herr_t; pub fn H5Fget_obj_count(file_id: hid_t, types: c_uint) -> ssize_t; pub fn H5Fget_obj_ids( @@ -165,7 +175,10 @@ extern "C" { pub fn H5Fget_freespace(file_id: hid_t) -> hssize_t; pub fn H5Fget_filesize(file_id: hid_t, size: *mut hsize_t) -> herr_t; pub fn H5Fget_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t; + #[cfg(not(feature = "1.13.0"))] pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *mut H5AC_cache_config_t) -> herr_t; + #[cfg(feature = "1.13.0")] + pub fn H5Fset_mdc_config(file_id: hid_t, config_ptr: *const H5AC_cache_config_t) -> herr_t; pub fn H5Fget_mdc_hit_rate(file_id: hid_t, hit_rate_ptr: *mut c_double) -> herr_t; pub fn H5Fget_mdc_size( file_id: hid_t, max_size_ptr: *mut size_t, min_clean_size_ptr: *mut size_t, @@ -175,23 +188,23 @@ extern "C" { pub fn H5Fget_name(obj_id: hid_t, name: *mut c_char, size: size_t) -> ssize_t; } -#[cfg(hdf5_1_8_7)] +#[cfg(feature = "1.8.7")] extern "C" { pub fn H5Fclear_elink_file_cache(file_id: hid_t) -> herr_t; } -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] extern "C" { pub fn H5Fget_file_image(file_id: hid_t, buf_ptr: *mut c_void, buf_len: size_t) -> ssize_t; } -#[cfg(all(hdf5_1_8_9, h5_have_parallel))] +#[cfg(all(feature = "1.8.9", feature = "have-parallel"))] extern "C" { pub fn H5Fset_mpi_atomicity(file_id: hid_t, flag: hbool_t) -> herr_t; pub fn H5Fget_mpi_atomicity(file_id: hid_t, flag: *mut hbool_t) -> herr_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] mod hdf5_1_10_0 { use super::*; @@ -308,20 +321,22 @@ mod hdf5_1_10_0 { ) -> ssize_t; pub fn H5Fformat_convert(fid: hid_t) -> herr_t; pub fn H5Fget_info2(obj_id: hid_t, finfo: *mut H5F_info2_t) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.0, use H5Fget_info2")] - pub fn H5Fget_info1(obj_id: hid_t, finfo: *mut H5F_info1_t) -> herr_t; } +} - pub use super::{ - H5F_info_t as H5F_info1_t, H5F_info_t__sohm as H5F_info1_t__sohm, - H5Fget_info1 as 
H5Fget_info, - }; +extern "C" { + #[cfg_attr( + feature = "1.10.0", + deprecated(note = "deprecated in HDF5 1.10.0, use H5Fget_info2") + )] + #[cfg_attr(not(feature = "1.10.0"), link_name = "H5Fget_info")] + pub fn H5Fget_info1(obj_id: hid_t, finfo: *mut H5F_info1_t) -> herr_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] pub use self::hdf5_1_10_0::*; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] mod hdf5_1_10_1 { use super::*; @@ -355,11 +370,35 @@ mod hdf5_1_10_1 { } } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] pub use self::hdf5_1_10_1::*; -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] extern "C" { pub fn H5Fget_dset_no_attrs_hint(file_id: hid_t, minimize: *mut hbool_t) -> herr_t; pub fn H5Fset_dset_no_attrs_hint(file_id: hid_t, minimize: hbool_t) -> herr_t; } + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Fclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, file_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Fcreate_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + filename: *const c_char, flags: c_uint, fcpl_id: hid_t, fapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Fflush_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, object_id: hid_t, + scope: H5F_scope_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Fopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + filename: *const c_char, flags: c_uint, access_plit: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5reopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, file_id: hid_t, + es_id: hid_t, + ) -> hid_t; +} diff --git a/hdf5-sys/src/h5fd.rs b/hdf5-sys/src/h5fd.rs index ee29dc1ae..dfdc5bc34 100644 --- a/hdf5-sys/src/h5fd.rs +++ b/hdf5-sys/src/h5fd.rs @@ -1,3 +1,4 @@ +//! 
File drivers use std::mem; pub use self::H5FD_file_image_op_t::*; @@ -7,6 +8,7 @@ use crate::internal_prelude::*; use crate::h5f::{H5F_close_degree_t, H5F_mem_t}; +#[cfg(not(feature = "1.13.0"))] pub const H5_HAVE_VFL: c_uint = 1; pub const H5FD_VFD_DEFAULT: c_uint = 0; @@ -66,7 +68,7 @@ pub const H5FD_FEAT_DIRTY_SBLK_LOAD: c_uint = 0x00000040; pub const H5FD_FEAT_POSIX_COMPAT_HANDLE: c_uint = 0x00000080; pub const H5FD_FEAT_ALLOW_FILE_IMAGE: c_uint = 0x00000400; pub const H5FD_FEAT_CAN_USE_FILE_IMAGE_CALLBACKS: c_uint = 0x00000800; -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] pub const H5FD_FEAT_DEFAULT_VFD_COMPATIBLE: c_uint = 0x00008000; /* Flags for H5Pset_fapl_log() */ @@ -118,12 +120,18 @@ pub const H5FD_LOG_ALL: c_ulonglong = H5FD_LOG_FREE | H5FD_LOG_LOC_IO | H5FD_LOG_META_IO; +pub type H5FD_class_value_t = c_int; + #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5FD_class_t { + #[cfg(feature = "1.13.0")] + pub value: H5FD_class_value_t, pub name: *const c_char, pub maxaddr: haddr_t, pub fc_degree: H5F_close_degree_t, + #[cfg(feature = "1.13.0")] + pub terminate: Option herr_t>, pub sb_size: Option hsize_t>, pub sb_encode: Option herr_t>, @@ -206,6 +214,18 @@ pub struct H5FD_class_t { >, pub unlock: Option herr_t>, + #[cfg(feature = "1.13.0")] + pub del: Option herr_t>, + #[cfg(feature = "1.13.0")] + pub ctl: Option< + extern "C" fn( + file: *mut H5FD_t, + op_code: u64, + flags: u64, + input: *const c_char, + output: *mut *mut c_void, + ) -> herr_t, + >, pub fl_map: [H5FD_mem_t; 7usize], } @@ -220,7 +240,7 @@ impl Default for H5FD_class_t { pub struct H5FD_free_t { pub addr: haddr_t, pub size: hsize_t, - pub next: *mut H5FD_free_t, + pub next: *mut Self, } impl Default for H5FD_free_t { @@ -261,7 +281,7 @@ pub enum H5FD_file_image_op_t { H5FD_FILE_IMAGE_OP_FILE_CLOSE = 7, } -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5FD_file_image_callbacks_t { @@ -301,7 +321,7 @@ pub struct 
H5FD_file_image_callbacks_t { pub udata: *mut c_void, } -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] impl Default for H5FD_file_image_callbacks_t { fn default() -> Self { unsafe { mem::zeroed() } @@ -351,23 +371,116 @@ extern "C" { pub fn H5FD_multi_init() -> hid_t; } -#[cfg(h5_have_parallel)] +#[cfg(feature = "have-parallel")] extern "C" { pub fn H5FD_mpio_init() -> hid_t; } -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] extern "C" { pub fn H5FD_direct_init() -> hid_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5FDlock(file: *mut H5FD_t, rw: hbool_t) -> herr_t; pub fn H5FDunlock(file: *mut H5FD_t) -> herr_t; } -#[cfg(hdf5_1_10_2)] +#[cfg(all(feature = "1.10.6", not(feature = "1.13.0")))] +pub mod hdfs { + use super::*; + pub const H5FD__CURR_HDFS_FAPL_T_VERSION: c_uint = 1; + pub const H5FD__HDFS_NODE_NAME_SPACE: c_uint = 128; + pub const H5FD__HDFS_USER_NAME_SPACE: c_uint = 128; + pub const H5FD__HDFS_KERB_CACHE_PATH_SPACE: c_uint = 128; + + #[repr(C)] + pub struct H5FD_hdfs_fapl_t { + version: i32, + namenode_name: [c_char; H5FD__HDFS_NODE_NAME_SPACE as usize + 1], + namenode_port: i32, + user_name: [c_char; H5FD__HDFS_USER_NAME_SPACE as usize + 1], + kerberos_ticket_cache: [c_char; H5FD__HDFS_KERB_CACHE_PATH_SPACE as usize + 1], + stream_buffer_size: i32, + } + + extern "C" { + pub fn H5FD_hdfs_init() -> hid_t; + pub fn H5Pget_fapl_hdfs(fapl_id: hid_t, fa: *mut H5FD_hdfs_fapl_t) -> herr_t; + pub fn H5Pset_fapl_hdfs(fapl_id: hid_t, fa: *mut H5FD_hdfs_fapl_t) -> herr_t; + } +} + +#[cfg(feature = "1.10.6")] +pub mod ros3 { + use super::*; + pub const H5FD_CURR_ROS3_FAPL_T_VERSION: c_uint = 1; + pub const H5FD_ROS3_MAX_REGION_LEN: c_uint = 128; + pub const H5FD_ROS3_MAX_SECRET_ID_LEN: c_uint = 128; + pub const H5FD_ROS3_MAX_SECRET_KEY_LEN: c_uint = 128; + + #[repr(C)] + pub struct H5FD_ros3_fapl_t { + version: i32, + authenticate: hbool_t, + aws_region: [c_char; H5FD_ROS3_MAX_REGION_LEN as usize + 1], + secret_id: [c_char; 
H5FD_ROS3_MAX_SECRET_ID_LEN as usize + 1], + secret_key: [c_char; H5FD_ROS3_MAX_SECRET_KEY_LEN as usize + 1], + } + + extern "C" { + pub fn H5FD_ros3_init() -> hid_t; + pub fn H5Pget_fapl_ros3(fapl_id: hid_t, fa: *mut H5FD_ros3_fapl_t) -> herr_t; + pub fn H5Pset_fapl_ros3(fapl_id: hid_t, fa: *mut H5FD_ros3_fapl_t) -> herr_t; + } +} + +#[cfg(any(all(feature = "1.10.7", not(feature = "1.12.0")), feature = "1.12.1"))] +pub mod splitter { + use super::*; + + pub const H5FD_CURR_SPLITTER_VFD_CONFIG_VERSION: c_uint = 1; + pub const H5FD_SPLITTER_PATH_MAX: c_uint = 4096; + pub const H5FD_SPLITTER_MAGIC: c_uint = 0x2B916880; + + #[repr(C)] + pub struct H5FD_splitter_vfg_config_t { + magic: i32, + version: c_uint, + rw_fapl_id: hid_t, + wo_fapl_id: hid_t, + wo_path: [c_char; H5FD_SPLITTER_PATH_MAX as usize + 1], + log_file_path: [c_char; H5FD_SPLITTER_PATH_MAX as usize + 1], + ignore_wo_errs: hbool_t, + } + + extern "C" { + pub fn H5FD_splitter_init() -> hid_t; + pub fn H5Pget_fapl_splitter( + fapl_id: hid_t, config_ptr: *mut H5FD_splitter_vfg_config_t, + ) -> herr_t; + pub fn H5Pset_fapl_splitter( + fapl_id: hid_t, config_ptr: *mut H5FD_splitter_vfg_config_t, + ) -> herr_t; + } +} + +#[cfg(feature = "1.10.2")] extern "C" { pub fn H5FDdriver_query(driver_id: hid_t, flags: *mut c_ulong) -> herr_t; } + +#[cfg(feature = "1.13.0")] +type H5FD_perform_init_func_t = Option hid_t>; + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5FDctl( + file: *mut H5FD_t, op_cod: u64, flags: u64, input: *const c_void, output: *mut *mut c_void, + ) -> herr_t; + pub fn H5FDdelete(name: *const c_char, fapl_id: hid_t) -> herr_t; + pub fn H5FDis_driver_registered_by_name(driver_name: *const c_char) -> htri_t; + pub fn H5FDis_driver_registered_by_value(driver_value: H5FD_class_value_t) -> htri_t; + pub fn H5FDperform_init(p: H5FD_perform_init_func_t) -> hid_t; +} diff --git a/hdf5-sys/src/h5g.rs b/hdf5-sys/src/h5g.rs index 57213d4c7..f80b17c19 100644 --- a/hdf5-sys/src/h5g.rs +++ 
b/hdf5-sys/src/h5g.rs @@ -1,10 +1,13 @@ +//! Creating and manipulating groups of objects inside an HDF5 file use std::mem; pub use self::H5G_storage_type_t::*; +pub use {H5Gcreate2 as H5Gcreate, H5Gopen2 as H5Gopen}; use crate::internal_prelude::*; use crate::h5l::{H5L_type_t, H5L_SAME_LOC, H5L_TYPE_ERROR, H5L_TYPE_HARD, H5L_TYPE_SOFT}; +use crate::h5o::H5O_stat_t; pub const H5G_SAME_LOC: hid_t = H5L_SAME_LOC; @@ -18,7 +21,7 @@ pub const H5G_NTYPES: c_uint = 256; pub const H5G_NLIBTYPES: c_uint = 8; pub const H5G_NUSERTYPES: c_uint = H5G_NTYPES - H5G_NLIBTYPES; -pub fn H5G_USERTYPE(X: c_uint) -> c_uint { +pub const fn H5G_USERTYPE(X: c_uint) -> c_uint { 8 + X } @@ -62,10 +65,104 @@ extern "C" { n: hsize_t, ginfo: *mut H5G_info_t, lapl_id: hid_t, ) -> herr_t; pub fn H5Gclose(group_id: hid_t) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Gcreate2")] + pub fn H5Gcreate1(loc_id: hid_t, name: *const c_char, size_hint: size_t) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Oget_comment")] + pub fn H5Gget_comment( + loc_id: hid_t, name: *const c_char, bufsize: size_t, buf: *mut c_char, + ) -> c_int; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lget_val")] + pub fn H5Gget_linkval(loc_id: hid_t, name: *const c_char, comment: *const c_char) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Gget_info")] + pub fn H5Gget_num_objs(loc_id: hid_t, num_objs: *mut hsize_t) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Oget_info or H5Lget_info")] + pub fn H5Gget_objinfo( + loc_id: hid_t, name: *const c_char, follow_link: hbool_t, statubuf: *mut H5G_stat_t, + ) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lget_name_by_idx")] + pub fn H5Gget_objname_by_idx( + loc_id: hid_t, idx: hsize_t, name: *mut c_char, size: size_t, + ) -> ssize_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Oget_info")] + pub fn H5Gget_objtype_by_idx(loc_id: hid_t, idx: hsize_t) -> H5G_obj_t; + 
#[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lcreate_hard or H5Lcreate_soft")] + pub fn H5Glink( + cur_loc_id: hid_t, type_: H5G_link_t, cur_name: *const c_char, new_name: *const c_char, + ) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lcreate_hard or H5Lcreate_soft")] + pub fn H5Glink2( + cur_loc_id: hid_t, cur_name: *const c_char, type_: H5G_link_t, new_loc_id: hid_t, + new_name: *const c_char, + ) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lmove")] + pub fn H5Gmove(src_loc_id: hid_t, src_name: *const c_char, dst_name: *const c_char) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Lmove")] + pub fn H5Gmove2( + src_loc_id: hid_t, src_name: *const c_char, dst_loc_id: hid_t, dst_name: *const c_char, + ) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Gopen2")] + pub fn H5Gopen1(loc_id: hid_t, name: *const c_char) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Ldelete")] + pub fn H5Gunlink(loc_id: hid_t, name: *const c_char) -> herr_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5Gflush(group_id: hid_t) -> herr_t; pub fn H5Grefresh(group_id: hid_t) -> herr_t; } + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub enum H5G_obj_t { + H5G_UNKNOwN = -1, + H5G_GROUP, + H5G_DATASET, + H5G_TYPE, + H5G_LINK, + H5G_UDLINK, + H5G_RESERVED_5, + H5G_RESERVED_6, + H5G_RESERVED_7, +} + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +pub struct H5G_stat_t { + fileno: [c_ulong; 2], + objno: [c_ulong; 2], + nlink: c_uint, + type_: H5G_obj_t, + mtime: time_t, + linklen: size_t, + ohdr: H5O_stat_t, +} + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Gclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, group_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Gcreate_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, lcpl_id: hid_t, gcpl_id: hid_t, gapl_id: 
hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Gget_info_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + ginfo: *mut H5G_info_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Gget_info_by_idx_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: c_ulong, + ginfo: *mut H5G_info_t, lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Gget_info_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, ginfo: *mut H5G_info_t, lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Gopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, gapl_id: hid_t, es_id: hid_t, + ) -> hid_t; +} diff --git a/hdf5-sys/src/h5i.rs b/hdf5-sys/src/h5i.rs index c46834d10..e9ed5a1c6 100644 --- a/hdf5-sys/src/h5i.rs +++ b/hdf5-sys/src/h5i.rs @@ -1,3 +1,4 @@ +//! 
Manipulating object identifiers and object names pub use self::H5I_type_t::*; use crate::internal_prelude::*; @@ -12,37 +13,43 @@ pub enum H5I_type_t { H5I_DATATYPE, H5I_DATASPACE, H5I_DATASET, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5I_MAP, H5I_ATTR, - #[cfg(not(hdf5_1_12_0))] - #[cfg_attr(hdf5_1_10_2, deprecated(note = "deprecated in HDF5 1.10.2"))] + #[cfg(not(feature = "1.12.0"))] + #[cfg_attr(feature = "1.10.2", deprecated(note = "deprecated in HDF5 1.10.2"))] H5I_REFERENCE, H5I_VFL, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5I_VOL, H5I_GENPROP_CLS, H5I_GENPROP_LST, H5I_ERROR_CLASS, H5I_ERROR_MSG, H5I_ERROR_STACK, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5I_SPACE_SEL_ITER, + #[cfg(feature = "1.13.0")] + H5I_EVENTSET, H5I_NTYPES, } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] pub type hid_t = i64; -#[cfg(not(hdf5_1_10_0))] +#[cfg(not(feature = "1.10.0"))] pub type hid_t = c_int; pub const H5I_INVALID_HID: hid_t = -1; +#[cfg(not(feature = "1.13.0"))] pub type H5I_free_t = Option herr_t>; +#[cfg(feature = "1.13.0")] +pub type H5I_free_t = Option herr_t>; + pub type H5I_search_func_t = Option c_int>; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub type H5I_iterate_func_t = Option herr_t>; extern "C" { @@ -64,9 +71,24 @@ extern "C" { pub fn H5Idec_type_ref(type_: H5I_type_t) -> c_int; pub fn H5Iget_type_ref(type_: H5I_type_t) -> c_int; pub fn H5Isearch(type_: H5I_type_t, func: H5I_search_func_t, key: *mut c_void) -> *mut c_void; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Iiterate(type_: H5I_type_t, op: H5I_iterate_func_t, op_data: *mut c_void) -> herr_t; pub fn H5Inmembers(type_: H5I_type_t, num_members: *mut hsize_t) -> herr_t; pub fn H5Itype_exists(type_: H5I_type_t) -> htri_t; pub fn H5Iis_valid(id: hid_t) -> htri_t; } + +#[cfg(feature = "1.13.0")] +pub type H5I_future_realize_func_t = + Option herr_t>; + +#[cfg(feature = "1.13.0")] +pub type H5I_future_discard_func_t = Option herr_t>; + +#[cfg(feature = 
"1.13.0")] +extern "C" { + pub fn H5Iregister_future( + type_: H5I_type_t, object: *const c_void, realize_cb: H5I_future_realize_func_t, + discard_cb: H5I_future_discard_func_t, + ) -> hid_t; +} diff --git a/hdf5-sys/src/h5l.rs b/hdf5-sys/src/h5l.rs index 7d1ebd812..c70d4fd9d 100644 --- a/hdf5-sys/src/h5l.rs +++ b/hdf5-sys/src/h5l.rs @@ -1,8 +1,9 @@ +//! Creating and manipulating links within an HDF5 group use std::mem; pub use self::H5L_type_t::*; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] use crate::h5o::H5O_token_t; use crate::internal_prelude::*; @@ -64,7 +65,7 @@ impl H5L_info1_t__u { #[repr(C)] #[derive(Debug, Copy, Clone)] -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub struct H5L_info2_t { pub type_: H5L_type_t, pub corder_valid: hbool_t, @@ -73,7 +74,7 @@ pub struct H5L_info2_t { pub u: H5L_info1_t__u, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] impl Default for H5L_info2_t { fn default() -> Self { unsafe { mem::zeroed() } @@ -82,13 +83,13 @@ impl Default for H5L_info2_t { #[repr(C)] #[derive(Copy, Clone)] -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub union H5L_info2_t__u { token: H5O_token_t, val_size: size_t, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] impl Default for H5L_info2_t__u { fn default() -> Self { unsafe { mem::zeroed() } @@ -175,7 +176,7 @@ pub type H5L_iterate1_t = Option< op_data: *mut c_void, ) -> herr_t, >; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub type H5L_iterate2_t = Option< extern "C" fn( group: hid_t, @@ -226,12 +227,15 @@ extern "C" { n: hsize_t, buf: *mut c_void, size: size_t, lapl_id: hid_t, ) -> herr_t; pub fn H5Lexists(loc_id: hid_t, name: *const c_char, lapl_id: hid_t) -> htri_t; - #[cfg_attr(hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Lget_info2()"))] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Lget_info")] + #[cfg_attr( + feature = "1.12.0", + deprecated(note = "deprecated in HDF5 1.12.0, use H5Lget_info2()") + )] + #[cfg_attr(not(feature = "1.12.0"), link_name 
= "H5Lget_info")] pub fn H5Lget_info1( loc_id: hid_t, name: *const c_char, linfo: *mut H5L_info1_t, lapl_id: hid_t, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Lget_info2( loc_id: hid_t, name: *const c_char, linfo: *mut H5L_info2_t, lapl_id: hid_t, ) -> herr_t; @@ -239,12 +243,12 @@ extern "C" { hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Lget_info_by_idx2()") )] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Lget_info_by_idx")] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Lget_info_by_idx")] pub fn H5Lget_info_by_idx( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: hsize_t, linfo: *mut H5L_info1_t, lapl_id: hid_t, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Lget_info_by_idx2( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: hsize_t, linfo: *mut H5L_info2_t, lapl_id: hid_t, @@ -253,13 +257,16 @@ extern "C" { loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: hsize_t, name: *mut c_char, size: size_t, lapl_id: hid_t, ) -> ssize_t; - #[cfg_attr(hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Literate2()"))] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Literate")] + #[cfg_attr( + feature = "1.12.0", + deprecated(note = "deprecated in HDF5 1.12.0, use H5Literate2()") + )] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Literate")] pub fn H5Literate1( grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, idx: *mut hsize_t, op: H5L_iterate1_t, op_data: *mut c_void, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Literate2( grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, idx: *mut hsize_t, op: H5L_iterate2_t, op_data: *mut c_void, @@ -268,23 +275,23 @@ extern "C" { hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Literate_by_name2()") )] - #[cfg_attr(not(hdf5_1_12_0), link_name = 
"H5Literate_by_name")] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Literate_by_name")] pub fn H5Literate_by_name1( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, idx: *mut hsize_t, op: H5L_iterate1_t, op_data: *mut c_void, lapl_id: hid_t, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Literate_by_name2( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, idx: *mut hsize_t, op: H5L_iterate2_t, op_data: *mut c_void, lapl_id: hid_t, ) -> herr_t; - #[cfg_attr(hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Lvisit2()"))] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Lvisit")] + #[cfg_attr(feature = "1.12.0", deprecated(note = "deprecated in HDF5 1.12.0, use H5Lvisit2()"))] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Lvisit")] pub fn H5Lvisit1( grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5L_iterate1_t, op_data: *mut c_void, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Lvisit2( grp_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5L_iterate2_t, op_data: *mut c_void, @@ -293,12 +300,12 @@ extern "C" { hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Lvisit_by_name2()") )] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Lvisit_by_name")] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Lvisit_by_name")] pub fn H5Lvisit_by_name1( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, op: H5L_iterate1_t, op_data: *mut c_void, lapl_id: hid_t, ) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Lvisit_by_name2( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, op: H5L_iterate2_t, op_data: *mut c_void, lapl_id: hid_t, @@ -320,11 +327,44 @@ extern "C" { ) -> herr_t; } -#[cfg(not(hdf5_1_12_0))] +#[cfg(not(feature = "1.12.0"))] pub use self::{ H5L_info1_t as H5L_info_t, 
H5L_iterate1_t as H5L_iterate_t, H5Literate1 as H5Literate, }; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub use self::{ H5L_info2_t as H5L_info_t, H5L_iterate2_t as H5L_iterate_t, H5Literate2 as H5Literate, }; + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Lcreate_hard_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, cur_loc_id: hid_t, + cur_name: *const c_char, new_loc_id: hid_t, new_name: *const c_char, lcpl_id: hid_t, + lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Lcreate_soft_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + link_target: *const c_char, link_loc_id: hid_t, link_name: *const c_char, lcpl_id: hid_t, + lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Ldelete_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Ldelete_by_idx_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: c_ulong, + lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Lexists_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, exists: *mut hbool_t, lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Literate_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, group_id: hid_t, + idx_type: H5_index_t, order: H5_iter_order_t, idx_p: *mut c_ulong, op: H5L_iterate2_t, + op_data: *mut c_void, es_id: hid_t, + ) -> herr_t; + +} diff --git a/hdf5-sys/src/h5mm.rs b/hdf5-sys/src/h5mm.rs index 7702099e7..8a0e861a8 100644 --- a/hdf5-sys/src/h5mm.rs +++ b/hdf5-sys/src/h5mm.rs @@ -1,3 +1,4 @@ +//! 
Memory managment use crate::internal_prelude::*; pub type H5MM_allocate_t = diff --git a/hdf5-sys/src/h5o.rs b/hdf5-sys/src/h5o.rs index 4c9888704..eae91e04f 100644 --- a/hdf5-sys/src/h5o.rs +++ b/hdf5-sys/src/h5o.rs @@ -1,7 +1,21 @@ +//! Manipulating objects in an HDF5 file use std::mem; pub use self::H5O_mcdt_search_ret_t::*; pub use self::H5O_type_t::*; +#[cfg(not(feature = "1.12.0"))] +pub use { + H5O_info1_t as H5O_info_t, H5O_info1_t__meta_size as H5O_info_t_meta_size, + H5O_iterate1_t as H5O_iterate_t, +}; +#[cfg(feature = "1.12.0")] +pub use {H5O_info2_t as H5O_info_t, H5O_iterate2_t as H5O_iterate_t}; +#[cfg(not(feature = "1.10.3"))] +pub use { + H5Oget_info1 as H5Oget_info, H5Oget_info_by_idx1 as H5Oget_info_by_idx, + H5Oget_info_by_name1 as H5Oget_info_by_name, H5Ovisit1 as H5Ovisit, + H5Ovisit_by_name1 as H5Ovisit_by_name, +}; use crate::internal_prelude::*; @@ -11,11 +25,11 @@ pub const H5O_COPY_EXPAND_EXT_LINK_FLAG: c_uint = 0x0004; pub const H5O_COPY_EXPAND_REFERENCE_FLAG: c_uint = 0x0008; pub const H5O_COPY_WITHOUT_ATTR_FLAG: c_uint = 0x0010; pub const H5O_COPY_PRESERVE_NULL_FLAG: c_uint = 0x0020; -#[cfg(not(hdf5_1_8_9))] +#[cfg(not(feature = "1.8.9"))] pub const H5O_COPY_ALL: c_uint = 0x003F; -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] pub const H5O_COPY_MERGE_COMMITTED_DTYPE_FLAG: c_uint = 0x0040; -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] pub const H5O_COPY_ALL: c_uint = 0x007F; pub const H5O_SHMESG_NONE_FLAG: c_uint = 0x0000; @@ -44,27 +58,27 @@ pub const H5O_HDR_ALL_FLAGS: c_uint = H5O_HDR_CHUNK0_SIZE pub const H5O_SHMESG_MAX_NINDEXES: c_uint = 8; pub const H5O_SHMESG_MAX_LIST_SIZE: c_uint = 5000; -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] pub const H5O_INFO_BASIC: c_uint = 0x0001; -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] pub const H5O_INFO_TIME: c_uint = 0x0002; -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] pub const H5O_INFO_NUM_ATTRS: c_uint = 0x0004; -#[cfg(all(hdf5_1_10_3, not(hdf5_1_12_0)))] +#[cfg(all(feature = 
"1.10.3", not(feature = "1.12.0")))] pub const H5O_INFO_HDR: c_uint = 0x0008; -#[cfg(all(hdf5_1_10_3, not(hdf5_1_12_0)))] +#[cfg(all(feature = "1.10.3", not(feature = "1.12.0")))] pub const H5O_INFO_META_SIZE: c_uint = 0x0010; -#[cfg(all(hdf5_1_10_3, not(hdf5_1_12_0)))] +#[cfg(all(feature = "1.10.3", not(feature = "1.12.0")))] pub const H5O_INFO_ALL: c_uint = H5O_INFO_BASIC | H5O_INFO_TIME | H5O_INFO_NUM_ATTRS | H5O_INFO_HDR | H5O_INFO_META_SIZE; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5O_INFO_ALL: c_uint = H5O_INFO_BASIC | H5O_INFO_TIME | H5O_INFO_NUM_ATTRS; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5O_NATIVE_INFO_HDR: c_uint = 0x0008; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5O_NATIVE_INFO_META_SIZE: c_uint = 0x0010; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5O_NATIVE_INFO_ALL: c_uint = H5O_NATIVE_INFO_HDR | H5O_NATIVE_INFO_META_SIZE; #[repr(C)] @@ -74,7 +88,7 @@ pub enum H5O_type_t { H5O_TYPE_GROUP, H5O_TYPE_DATASET, H5O_TYPE_NAMED_DATATYPE, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5O_TYPE_MAP, H5O_TYPE_NTYPES, } @@ -126,7 +140,7 @@ impl Default for H5O_hdr_info_t__mesg { #[repr(C)] #[derive(Debug, Copy, Clone)] -pub struct H5O_info_t { +pub struct H5O_info1_t { pub fileno: c_ulong, pub addr: haddr_t, pub type_: H5O_type_t, @@ -137,10 +151,10 @@ pub struct H5O_info_t { pub btime: time_t, pub num_attrs: hsize_t, pub hdr: H5O_hdr_info_t, - pub meta_size: H5O_info_t__meta_size, + pub meta_size: H5O_info1_t__meta_size, } -impl Default for H5O_info_t { +impl Default for H5O_info1_t { fn default() -> Self { unsafe { mem::zeroed() } } @@ -148,13 +162,12 @@ impl Default for H5O_info_t { #[repr(C)] #[derive(Debug, Copy, Clone)] -pub struct H5O_info_t__meta_size { +pub struct H5O_info1_t__meta_size { pub obj: H5_ih_info_t, pub attr: H5_ih_info_t, } -#[cfg(not(hdf5_1_12_0))] -impl Default for H5O_info_t__meta_size { +impl Default for H5O_info1_t__meta_size { fn default() -> Self { unsafe 
{ mem::zeroed() } } @@ -162,16 +175,16 @@ impl Default for H5O_info_t__meta_size { pub type H5O_msg_crt_idx_t = uint32_t; -pub type H5O_iterate_t = Option< +pub type H5O_iterate1_t = Option< extern "C" fn( obj: hid_t, name: *const c_char, - info: *const H5O_info_t, + info: *const H5O_info1_t, op_data: *mut c_void, ) -> herr_t, >; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub type H5O_iterate2_t = Option< extern "C" fn( obj: hid_t, @@ -189,27 +202,32 @@ pub enum H5O_mcdt_search_ret_t { H5O_MCDT_SEARCH_STOP = 1, } -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] pub type H5O_mcdt_search_cb_t = Option H5O_mcdt_search_ret_t>; -#[cfg(not(hdf5_1_10_3))] +#[cfg(not(feature = "1.10.3"))] extern "C" { - pub fn H5Oget_info(loc_id: hid_t, oinfo: *mut H5O_info_t) -> herr_t; - pub fn H5Oget_info_by_name( - loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info_t, lapl_id: hid_t, + #[link_name = "H5Oget_info"] + pub fn H5Oget_info1(loc_id: hid_t, oinfo: *mut H5O_info1_t) -> herr_t; + #[link_name = "H5Oget_info_by_name"] + pub fn H5Oget_info_by_name1( + loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - pub fn H5Oget_info_by_idx( + #[link_name = "H5Oget_info_by_idx"] + pub fn H5Oget_info_by_idx1( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - n: hsize_t, oinfo: *mut H5O_info_t, lapl_id: hid_t, + n: hsize_t, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - pub fn H5Ovisit( - obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate_t, + #[link_name = "H5Ovisit"] + pub fn H5Ovisit1( + obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate1_t, op_data: *mut c_void, ) -> herr_t; - pub fn H5Ovisit_by_name( + #[link_name = "H5Ovisit_by_name"] + pub fn H5Ovisit_by_name1( loc_id: hid_t, obj_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - op: H5O_iterate_t, op_data: *mut c_void, lapl_id: hid_t, + op: H5O_iterate1_t, op_data: *mut 
c_void, lapl_id: hid_t, ) -> herr_t; } @@ -229,7 +247,9 @@ extern "C" { src_loc_id: hid_t, src_name: *const c_char, dst_loc_id: hid_t, dst_name: *const c_char, ocpypl_id: hid_t, lcpl_id: hid_t, ) -> herr_t; + #[deprecated(note = "function is deprecated in favor of object attributes")] pub fn H5Oset_comment(obj_id: hid_t, comment: *const c_char) -> herr_t; + #[deprecated(note = "function is deprecated in favor of object attributes")] pub fn H5Oset_comment_by_name( loc_id: hid_t, name: *const c_char, comment: *const c_char, lapl_id: hid_t, ) -> herr_t; @@ -240,15 +260,15 @@ extern "C" { pub fn H5Oclose(object_id: hid_t) -> herr_t; } -#[cfg(hdf5_1_8_5)] +#[cfg(feature = "1.8.5")] use crate::h5::htri_t; -#[cfg(hdf5_1_8_5)] +#[cfg(feature = "1.8.5")] extern "C" { pub fn H5Oexists_by_name(loc_id: hid_t, name: *const c_char, lapl_id: hid_t) -> htri_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5Odisable_mdc_flushes(object_id: hid_t) -> herr_t; pub fn H5Oenable_mdc_flushes(object_id: hid_t) -> herr_t; @@ -257,52 +277,52 @@ extern "C" { pub fn H5Orefresh(oid: hid_t) -> herr_t; } -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] mod hdf5_1_10_3 { use super::*; extern "C" { - pub fn H5Oget_info2(loc_id: hid_t, oinfo: *mut H5O_info_t, fields: c_uint) -> herr_t; + pub fn H5Oget_info2(loc_id: hid_t, oinfo: *mut H5O_info1_t, fields: c_uint) -> herr_t; pub fn H5Oget_info_by_name2( - loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info_t, fields: c_uint, + loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info1_t, fields: c_uint, lapl_id: hid_t, ) -> herr_t; pub fn H5Oget_info_by_idx2( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - n: hsize_t, oinfo: *mut H5O_info_t, fields: c_uint, lapl_id: hid_t, + n: hsize_t, oinfo: *mut H5O_info1_t, fields: c_uint, lapl_id: hid_t, ) -> herr_t; pub fn H5Ovisit2( - obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate_t, + obj_id: hid_t, idx_type: 
H5_index_t, order: H5_iter_order_t, op: H5O_iterate1_t, op_data: *mut c_void, fields: c_uint, ) -> herr_t; pub fn H5Ovisit_by_name2( loc_id: hid_t, obj_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - op: H5O_iterate_t, op_data: *mut c_void, fields: c_uint, lapl_id: hid_t, + op: H5O_iterate1_t, op_data: *mut c_void, fields: c_uint, lapl_id: hid_t, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info2()")] - pub fn H5Oget_info1(loc_id: hid_t, oinfo: *mut H5O_info_t) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_name2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info2")] + pub fn H5Oget_info1(loc_id: hid_t, oinfo: *mut H5O_info1_t) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_name2")] pub fn H5Oget_info_by_name1( - loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info_t, lapl_id: hid_t, + loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_idx2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_idx2")] pub fn H5Oget_info_by_idx1( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - n: hsize_t, oinfo: *mut H5O_info_t, lapl_id: hid_t, + n: hsize_t, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit2")] pub fn H5Ovisit1( - obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate_t, + obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate1_t, op_data: *mut c_void, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit_by_name2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit_by_name2")] pub fn H5Ovisit_by_name1( loc_id: hid_t, obj_name: *const c_char, 
idx_type: H5_index_t, order: H5_iter_order_t, - op: H5O_iterate_t, op_data: *mut c_void, lapl_id: hid_t, + op: H5O_iterate1_t, op_data: *mut c_void, lapl_id: hid_t, ) -> herr_t; } - #[cfg(not(hdf5_1_10_5))] + #[cfg(not(feature = "1.10.5"))] pub use self::{ H5Oget_info1 as H5Oget_info, H5Oget_info_by_idx1 as H5Oget_info_by_idx, H5Oget_info_by_name1 as H5Oget_info_by_name, H5Ovisit1 as H5Ovisit, @@ -310,70 +330,70 @@ mod hdf5_1_10_3 { }; } -#[cfg(hdf5_1_10_3)] +#[cfg(feature = "1.10.3")] pub use self::hdf5_1_10_3::*; -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] extern "C" { // They've messed up when introducing compatibility macros which broke ABI compatibility; // in 1.10.5 those APIs were copied over to old names in order to be compatible with // older library versions - so we can link to them directly again. - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info2()")] - pub fn H5Oget_info(loc_id: hid_t, oinfo: *mut H5O_info_t) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_name2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info2")] + pub fn H5Oget_info(loc_id: hid_t, oinfo: *mut H5O_info1_t) -> herr_t; + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_name2")] pub fn H5Oget_info_by_name( - loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info_t, lapl_id: hid_t, + loc_id: hid_t, name: *const c_char, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_idx2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Oget_info_by_idx2")] pub fn H5Oget_info_by_idx( loc_id: hid_t, group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - n: hsize_t, oinfo: *mut H5O_info_t, lapl_id: hid_t, + n: hsize_t, oinfo: *mut H5O_info1_t, lapl_id: hid_t, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit2")] pub 
fn H5Ovisit( - obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate_t, + obj_id: hid_t, idx_type: H5_index_t, order: H5_iter_order_t, op: H5O_iterate1_t, op_data: *mut c_void, ) -> herr_t; - #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit_by_name2()")] + #[deprecated(note = "deprecated in HDF5 1.10.3, use H5Ovisit_by_name2")] pub fn H5Ovisit_by_name( loc_id: hid_t, obj_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, - op: H5O_iterate_t, op_data: *mut c_void, lapl_id: hid_t, + op: H5O_iterate1_t, op_data: *mut c_void, lapl_id: hid_t, ) -> herr_t; } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5O_MAX_TOKEN_SIZE: usize = 16; #[repr(C)] -#[derive(Debug, Copy, Clone)] -#[cfg(hdf5_1_12_0)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[cfg(feature = "1.12.0")] pub struct H5O_token_t { __data: [u8; H5O_MAX_TOKEN_SIZE], } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] impl Default for H5O_token_t { fn default() -> Self { *H5O_TOKEN_UNDEF } } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5O_info2_t { - fileno: c_ulong, - token: H5O_token_t, - type_: H5O_type_t, - rc: c_uint, - atime: time_t, - mtime: time_t, - ctime: time_t, - btime: time_t, - num_attrs: hsize_t, + pub fileno: c_ulong, + pub token: H5O_token_t, + pub type_: H5O_type_t, + pub rc: c_uint, + pub atime: time_t, + pub mtime: time_t, + pub ctime: time_t, + pub btime: time_t, + pub num_attrs: hsize_t, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5O_native_info_meta_size_t { @@ -381,7 +401,7 @@ pub struct H5O_native_info_meta_size_t { attr: H5_ih_info_t, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5O_native_info_t { @@ -389,7 +409,16 @@ pub struct H5O_native_info_t { meta_size: H5O_native_info_meta_size_t, } -#[cfg(hdf5_1_12_0)] +#[derive(Debug, Copy, Clone)] 
+#[repr(C)] +pub struct H5O_stat_t { + size: hsize_t, + free: hsize_t, + nmesgs: c_uint, + nchunks: c_uint, +} + +#[cfg(feature = "1.12.0")] extern "C" { pub fn H5Oget_info3(loc_id: hid_t, oinfo: *mut H5O_info2_t, fields: c_uint) -> herr_t; pub fn H5Oget_info_by_idx3( @@ -431,18 +460,52 @@ extern "C" { ) -> herr_t; } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub use self::globals::*; -#[cfg(all(not(all(target_env = "msvc", not(feature = "static"))), hdf5_1_12_0))] +#[cfg(all(not(all(target_env = "msvc", not(feature = "static"))), feature = "1.12.0"))] mod globals { use super::H5O_token_t as id_t; extern_static!(H5O_TOKEN_UNDEF, H5O_TOKEN_UNDEF_g); } -#[cfg(all(target_env = "msvc", not(feature = "static"), hdf5_1_12_0))] +#[cfg(all(target_env = "msvc", not(feature = "static"), feature = "1.12.0"))] mod globals { // TODO: special DLL handling? use super::H5O_token_t as id_t; extern_static!(H5O_TOKEN_UNDEF, __imp_H5O_TOKEN_UNDEF_g); } + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Oclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, object_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Ocopy_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, src_loc_id: hid_t, + src_name: *const c_char, dst_loc_id: hid_t, dst_name: *const c_char, ocpypl_id: hid_t, + lcpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Oflush_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, obj_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Oget_info_by_name_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, oinfo: *mut H5O_info2_t, fields: c_uint, lapl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5Oopen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, lapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Oopen_by_idx_async( + app_file: *const c_char, 
app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + group_name: *const c_char, idx_type: H5_index_t, order: H5_iter_order_t, n: c_ulong, + lapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Orefresh_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, oid: hid_t, + es_id: hid_t, + ) -> herr_t; +} diff --git a/hdf5-sys/src/h5p.rs b/hdf5-sys/src/h5p.rs index b67eb3712..c251eb30a 100644 --- a/hdf5-sys/src/h5p.rs +++ b/hdf5-sys/src/h5p.rs @@ -1,3 +1,4 @@ +//! Creating and manipulating property lists to control HDF5 library behaviour use crate::internal_prelude::*; use crate::h5ac::H5AC_cache_config_t; @@ -9,13 +10,16 @@ use crate::h5mm::{H5MM_allocate_t, H5MM_free_t}; use crate::h5t::{H5T_conv_except_func_t, H5T_cset_t}; use crate::h5z::{H5Z_EDC_t, H5Z_SO_scale_type_t, H5Z_filter_func_t, H5Z_filter_t}; -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] use crate::h5fd::H5FD_file_image_callbacks_t; -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] use crate::h5o::H5O_mcdt_search_cb_t; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] use crate::{h5ac::H5AC_cache_image_config_t, h5f::H5F_fspace_strategy_t}; +#[cfg(feature = "1.13.0")] +use crate::{h5fd::H5FD_class_value_t, h5s::H5S_seloper_t}; + pub const H5P_CRT_ORDER_TRACKED: c_uint = 0x0001; pub const H5P_CRT_ORDER_INDEXED: c_uint = 0x0002; @@ -45,7 +49,7 @@ pub type H5P_iterate_t = pub use self::globals::*; -#[cfg(all(not(hdf5_1_8_14), not(all(target_env = "msvc", not(feature = "static")))))] +#[cfg(all(not(feature = "1.8.14"), not(all(target_env = "msvc", not(feature = "static")))))] mod globals { pub use crate::h5i::hid_t as id_t; @@ -85,7 +89,7 @@ mod globals { extern_static!(H5P_LST_LINK_ACCESS, H5P_LST_LINK_ACCESS_g); } -#[cfg(all(hdf5_1_8_14, not(all(target_env = "msvc", not(feature = "static")))))] +#[cfg(all(feature = "1.8.14", not(all(target_env = "msvc", not(feature = "static")))))] mod globals { pub use crate::h5i::hid_t as id_t; @@ -124,7 +128,7 @@ mod globals { 
extern_static!(H5P_LST_LINK_CREATE, H5P_LST_LINK_CREATE_ID_g); extern_static!(H5P_LST_LINK_ACCESS, H5P_LST_LINK_ACCESS_ID_g); - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] #[allow(clippy::module_inception)] pub mod globals { use super::*; @@ -137,11 +141,11 @@ mod globals { extern_static!(H5P_VOL_INITIALIZE_DEFAULT, H5P_LST_VOL_INITIALIZE_ID_g); extern_static!(H5P_REFERENCE_ACCESS_DEFAULT, H5P_LST_REFERENCE_ACCESS_ID_g); } - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub use globals::*; } -#[cfg(all(not(hdf5_1_8_14), all(target_env = "msvc", not(feature = "static"))))] +#[cfg(all(not(feature = "1.8.14"), all(target_env = "msvc", not(feature = "static"))))] mod globals { // dllimport hack pub type id_t = usize; @@ -182,7 +186,7 @@ mod globals { extern_static!(H5P_LST_LINK_ACCESS, __imp_H5P_LST_LINK_ACCESS_g); } -#[cfg(all(hdf5_1_8_14, all(target_env = "msvc", not(feature = "static"))))] +#[cfg(all(feature = "1.8.14", all(target_env = "msvc", not(feature = "static"))))] mod globals { // dllimport hack pub type id_t = usize; @@ -222,7 +226,7 @@ mod globals { extern_static!(H5P_LST_LINK_CREATE, __imp_H5P_LST_LINK_CREATE_ID_g); extern_static!(H5P_LST_LINK_ACCESS, __imp_H5P_LST_LINK_ACCESS_ID_g); - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] #[allow(clippy::module_inception)] pub mod globals { use super::*; @@ -235,7 +239,7 @@ mod globals { extern_static!(H5P_VOL_INITIALIZE_DEFAULT, __imp_H5P_LST_VOL_INITIALIZE_ID_g); extern_static!(H5P_REFERENCE_ACCESS_DEFAULT, __imp_H5P_LST_REFERENCE_ACCESS_ID_g); } - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub use globals::*; } @@ -466,7 +470,10 @@ extern "C" { ) -> herr_t; pub fn H5Pset_copy_object(plist_id: hid_t, crt_intmd: c_uint) -> herr_t; pub fn H5Pget_copy_object(plist_id: hid_t, crt_intmd: *mut c_uint) -> herr_t; - #[cfg_attr(hdf5_1_10_0, deprecated(note = "deprecated in HDF5 1.10.0, use H5Fget_info2()"))] + #[cfg_attr( + feature = "1.10.0", + deprecated(note = "deprecated in HDF5 1.10.0, use 
H5Fget_info2()") + )] pub fn H5Pget_version( plist_id: hid_t, boot: *mut c_uint, freelist: *mut c_uint, stab: *mut c_uint, shhdr: *mut c_uint, @@ -527,17 +534,17 @@ extern "C" { ) -> herr_t; // direct - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] pub fn H5Pset_fapl_direct( fapl_id: hid_t, alignment: size_t, block_size: size_t, cbuf_size: size_t, ) -> herr_t; - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] pub fn H5Pget_fapl_direct( fapl_id: hid_t, alignment: *mut size_t, block_size: *mut size_t, cbuf_size: *mut size_t, ) -> herr_t; } -#[cfg(h5_have_parallel)] +#[cfg(feature = "have-parallel")] mod mpio { use crate::internal_prelude::*; @@ -583,7 +590,7 @@ mod mpio { } } -#[cfg(h5_have_parallel)] +#[cfg(feature = "have-parallel")] pub use self::mpio::*; #[cfg(target_os = "windows")] @@ -591,13 +598,13 @@ extern "C" { pub fn H5Pset_fapl_windows(fapl_id: hid_t) -> herr_t; } -#[cfg(hdf5_1_8_7)] +#[cfg(feature = "1.8.7")] extern "C" { pub fn H5Pset_elink_file_cache_size(plist_id: hid_t, efc_size: c_uint) -> herr_t; pub fn H5Pget_elink_file_cache_size(plist_id: hid_t, efc_size: *mut c_uint) -> herr_t; } -#[cfg(hdf5_1_8_9)] +#[cfg(feature = "1.8.9")] extern "C" { pub fn H5Pset_file_image(fapl_id: hid_t, buf_ptr: *mut c_void, buf_len: size_t) -> herr_t; pub fn H5Pget_file_image( @@ -619,7 +626,7 @@ extern "C" { pub fn H5Pfree_merge_committed_dtype_paths(plist_id: hid_t) -> herr_t; } -#[cfg(hdf5_1_8_13)] +#[cfg(feature = "1.8.13")] extern "C" { pub fn H5Pset_core_write_tracking( fapl_id: hid_t, is_enabled: hbool_t, page_size: size_t, @@ -629,19 +636,19 @@ extern "C" { ) -> herr_t; } -#[cfg(hdf5_1_8_17)] +#[cfg(feature = "1.8.17")] extern "C" { pub fn H5Pset_efile_prefix(dapl_id: hid_t, prefix: *const c_char) -> herr_t; pub fn H5Pget_efile_prefix(dapl_id: hid_t, prefix: *const c_char, size: size_t) -> ssize_t; } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] use crate::{ h5d::{H5D_append_cb_t, H5D_vds_view_t}, h5f::{H5F_file_space_type_t, 
H5F_flush_cb_t}, }; -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5Pset_append_flush( plist_id: hid_t, ndims: c_uint, boundary: *const hsize_t, func: H5D_append_cb_t, @@ -688,10 +695,13 @@ extern "C" { pub fn H5Pset_virtual_view(plist_id: hid_t, view: H5D_vds_view_t) -> herr_t; pub fn H5Pget_chunk_opts(plist_id: hid_t, opts: *mut c_uint) -> herr_t; pub fn H5Pset_chunk_opts(plist_id: hid_t, opts: c_uint) -> herr_t; - #[cfg_attr(hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Pencode2()"))] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Pencode")] + #[cfg_attr( + feature = "1.12.0", + deprecated(note = "deprecated in HDF5 1.12.0, use H5Pencode2()") + )] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Pencode")] pub fn H5Pencode1(plist_id: hid_t, buf: *mut c_void, nalloc: *mut size_t) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Pencode2( plist_id: hid_t, buf: *mut c_void, nalloc: *mut size_t, fapl_id: hid_t, ) -> herr_t; @@ -712,12 +722,12 @@ extern "C" { ) -> herr_t; } -#[cfg(all(hdf5_1_10_0, not(hdf5_1_12_0)))] +#[cfg(all(feature = "1.10.0", not(feature = "1.12.0")))] pub use self::H5Pencode1 as H5Pencode; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub use self::H5Pencode2 as H5Pencode; -#[cfg(all(hdf5_1_10_0, h5_have_parallel))] +#[cfg(all(feature = "1.10.0", feature = "have-parallel"))] extern "C" { pub fn H5Pset_coll_metadata_write(fapl_id: hid_t, is_collective: hbool_t) -> herr_t; pub fn H5Pget_coll_metadata_write(fapl_id: hid_t, is_collective: *mut hbool_t) -> herr_t; @@ -725,7 +735,7 @@ extern "C" { pub fn H5Pget_all_coll_metadata_ops(accpl_id: hid_t, is_collective: *mut hbool_t) -> herr_t; } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] extern "C" { pub fn H5Pset_evict_on_close(fapl_id: hid_t, evict_on_close: hbool_t) -> herr_t; pub fn H5Pget_evict_on_close(fapl_id: hid_t, evict_on_close: *mut hbool_t) -> herr_t; @@ -752,21 +762,50 @@ extern "C" { pub fn 
H5Pget_file_space_page_size(plist_id: hid_t, fsp_size: *mut hsize_t) -> herr_t; } -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] extern "C" { pub fn H5Pset_virtual_prefix(dapl_id: hid_t, prefix: *const c_char) -> herr_t; pub fn H5Pget_virtual_prefix(dapl_id: hid_t, prefix: *mut c_char, size: size_t) -> ssize_t; } -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] extern "C" { pub fn H5Pget_dset_no_attrs_hint(dcpl_id: hid_t, minimize: *mut hbool_t) -> herr_t; pub fn H5Pset_dset_no_attrs_hint(dcpl_id: hid_t, minimize: hbool_t) -> herr_t; } -#[cfg(hdf5_1_12_0)] +#[cfg(any(all(feature = "1.10.7", not(feature = "1.12.0")), feature = "1.12.1"))] +extern "C" { + pub fn H5Pget_file_locking( + fapl_id: hid_t, use_file_locking: *mut hbool_t, ignore_when_disable: *mut hbool_t, + ) -> herr_t; + pub fn H5Pset_file_locking( + fapl_id: hid_t, use_file_locking: hbool_t, ignore_when_disable: hbool_t, + ) -> herr_t; + +} + +#[cfg(feature = "1.12.0")] extern "C" { pub fn H5Pget_vol_id(plist_id: hid_t, vol_id: *mut hid_t) -> herr_t; pub fn H5Pget_vol_info(plist_id: hid_t, vol_info: *mut *mut c_void) -> herr_t; pub fn H5Pset_vol(plist_id: hid_t, new_vol_id: hid_t, new_vol_id: *const c_void) -> herr_t; } + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Pget_driver_config_str( + fapl_id: hid_t, config_buf: *mut c_char, buf_size: size_t, + ) -> ssize_t; + pub fn H5Pget_vol_cap_flags(plist_id: hid_t, cap_flags: *mut c_uint) -> herr_t; + pub fn H5Pset_dataset_io_hyperslab_selection( + plist_id: hid_t, rank: c_uint, op: H5S_seloper_t, start: *const hsize_t, + stride: *const hsize_t, count: *const hsize_t, block: *const hsize_t, + ) -> herr_t; + pub fn H5Pset_driver_by_name( + plist_id: hid_t, driver_name: *const c_char, driver_config: *const c_char, + ) -> herr_t; + pub fn H5Pset_driver_by_value( + plist_id: hid_t, driver_value: H5FD_class_value_t, driver_config: *const c_char, + ) -> herr_t; +} diff --git a/hdf5-sys/src/h5pl.rs b/hdf5-sys/src/h5pl.rs index 8fe3fba03..5bf9250e6 100644 
--- a/hdf5-sys/src/h5pl.rs +++ b/hdf5-sys/src/h5pl.rs @@ -1,6 +1,7 @@ +//! Programmatically controlling dynamically loaded plugins use crate::internal_prelude::*; -#[cfg(hdf5_1_8_15)] +#[cfg(feature = "1.8.15")] mod hdf5_1_8_15 { use super::*; @@ -9,9 +10,9 @@ mod hdf5_1_8_15 { pub enum H5PL_type_t { H5PL_TYPE_ERROR = -1, H5PL_TYPE_FILTER = 0, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5PL_VOL, - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] H5PL_TYPE_NONE, } @@ -26,10 +27,10 @@ mod hdf5_1_8_15 { } } -#[cfg(hdf5_1_8_15)] +#[cfg(feature = "1.8.15")] pub use self::hdf5_1_8_15::*; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] extern "C" { pub fn H5PLappend(search_path: *const c_char) -> herr_t; pub fn H5PLprepend(search_path: *const c_char) -> herr_t; diff --git a/hdf5-sys/src/h5r.rs b/hdf5-sys/src/h5r.rs index 4062c1eea..f1ac374e3 100644 --- a/hdf5-sys/src/h5r.rs +++ b/hdf5-sys/src/h5r.rs @@ -1,12 +1,18 @@ +//! Creating and manipulating references to specific objects and data regions in an HDF5 file pub use self::H5R_type_t::*; +#[cfg(not(feature = "1.10.0"))] +pub use H5Rdereference1 as H5Rdereference; +#[cfg(feature = "1.10.0")] +pub use H5Rdereference2 as H5Rdereference; use crate::internal_prelude::*; +use crate::h5g::H5G_obj_t; use crate::h5o::H5O_type_t; #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] -#[cfg(not(hdf5_1_12_0))] +#[cfg(not(feature = "1.12.0"))] pub enum H5R_type_t { H5R_BADTYPE = -1, H5R_OBJECT = 0, @@ -16,7 +22,7 @@ pub enum H5R_type_t { #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub enum H5R_type_t { H5R_BADTYPE = -1, H5R_OBJECT1 = 0, @@ -30,17 +36,14 @@ pub enum H5R_type_t { pub type hobj_ref_t = haddr_t; pub type hdset_reg_ref_t = [c_uchar; 12usize]; -#[cfg(not(hdf5_1_10_0))] -extern "C" { - pub fn H5Rdereference(dataset: hid_t, ref_type: H5R_type_t, ref_: *const c_void) -> hid_t; -} - extern "C" { pub fn H5Rcreate( ref_: *mut c_void, loc_id: 
hid_t, name: *const c_char, ref_type: H5R_type_t, space_id: hid_t, ) -> herr_t; pub fn H5Rget_region(dataset: hid_t, ref_type: H5R_type_t, ref_: *const c_void) -> hid_t; + #[deprecated(note = "deprecated in HDF5 1.8.0, use H5Rget_obj_type2")] + pub fn H5Rget_obj_type1(id: hid_t, ref_type: H5R_type_t, ref_: *const c_void) -> H5G_obj_t; pub fn H5Rget_obj_type2( id: hid_t, ref_type: H5R_type_t, ref_: *const c_void, obj_type: *mut H5O_type_t, ) -> herr_t; @@ -49,22 +52,23 @@ extern "C" { ) -> ssize_t; } -#[cfg(hdf5_1_10_0)] extern "C" { - #[deprecated(note = "deprecated in HDF5 1.10.0, use H5Rdereference2()")] + #[cfg_attr( + feature = "1.10.0", + deprecated(note = "deprecated in HDF5 1.10.0, use H5Rdereference2") + )] + #[cfg_attr(not(feature = "1.10.0"), link_name = "H5Rdereference")] pub fn H5Rdereference1(obj_id: hid_t, ref_type: H5R_type_t, ref_: *const c_void) -> hid_t; + #[cfg(feature = "1.10.0")] pub fn H5Rdereference2( obj_id: hid_t, oapl_id: hid_t, ref_type: H5R_type_t, ref_: *const c_void, ) -> hid_t; } -#[cfg(hdf5_1_10_0)] -pub use self::H5Rdereference1 as H5Rdereference; - -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] pub const H5R_REF_BUF_SIZE: usize = 64; -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] #[repr(C)] #[derive(Copy, Clone)] pub union H5R_ref_t_u { @@ -72,21 +76,21 @@ pub union H5R_ref_t_u { align: i64, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] impl Default for H5R_ref_t_u { fn default() -> Self { unsafe { std::mem::zeroed() } } } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] #[repr(C)] #[derive(Copy, Clone, Default)] pub struct H5R_ref_t { u: H5R_ref_t_u, } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] extern "C" { pub fn H5Rcopy(src_ref_ptr: *const H5R_ref_t, dst_ref_ptr: *mut H5R_ref_t) -> herr_t; pub fn H5Rcreate_attr( @@ -115,3 +119,19 @@ extern "C" { pub fn H5Ropen_object(ref_ptr: *const H5R_ref_t, rapl_id: hid_t, oapl_id: hid_t) -> hid_t; pub fn H5Ropen_region(ref_ptr: *const H5R_ref_t, rapl_id: hid_t, oapl_id: hid_t) 
-> hid_t; } + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Ropen_attr_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + ref_ptr: *mut H5R_ref_t, rapl_id: hid_t, aapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Ropen_object_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + ref_ptr: *mut H5R_ref_t, rapl_id: hid_t, oapl_id: hid_t, es_id: hid_t, + ) -> hid_t; + pub fn H5Ropen_region_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, + ref_ptr: *mut H5R_ref_t, rapl_id: hid_t, oapl_id: hid_t, es_id: hid_t, + ) -> hid_t; +} diff --git a/hdf5-sys/src/h5s.rs b/hdf5-sys/src/h5s.rs index 29036fb73..ff9e9d7e7 100644 --- a/hdf5-sys/src/h5s.rs +++ b/hdf5-sys/src/h5s.rs @@ -1,12 +1,17 @@ +//! Creating and manipulating dataspaces in which to store elements of a dataset pub use self::H5S_class_t::*; pub use self::H5S_sel_type::*; pub use self::H5S_seloper_t::*; +#[cfg(not(feature = "1.12.0"))] +pub use self::H5Sencode1 as H5Sencode; +#[cfg(feature = "1.12.0")] +pub use self::H5Sencode2 as H5Sencode; use crate::internal_prelude::*; pub const H5S_ALL: hid_t = 0; -pub const H5S_UNLIMITED: hsize_t = (-1 as hssize_t) as _; +pub const H5S_UNLIMITED: hsize_t = !0; pub const H5S_MAX_RANK: c_uint = 32; @@ -56,10 +61,13 @@ extern "C" { ) -> herr_t; pub fn H5Scopy(space_id: hid_t) -> hid_t; pub fn H5Sclose(space_id: hid_t) -> herr_t; - #[cfg_attr(hdf5_1_12_0, deprecated(note = "deprecated in HDF5 1.12.0, use H5Sencode2()"))] - #[cfg_attr(not(hdf5_1_12_0), link_name = "H5Sencode")] + #[cfg_attr( + feature = "1.12.0", + deprecated(note = "deprecated in HDF5 1.12.0, use H5Sencode2()") + )] + #[cfg_attr(not(feature = "1.12.0"), link_name = "H5Sencode")] pub fn H5Sencode1(obj_id: hid_t, buf: *mut c_void, nalloc: *mut size_t) -> herr_t; - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] pub fn H5Sencode2( obj_id: hid_t, buf: *mut c_void, nalloc: *mut size_t, fapl_id: hid_t, ) -> herr_t; @@ -98,12 
+106,7 @@ extern "C" { pub fn H5Sget_select_type(spaceid: hid_t) -> H5S_sel_type; } -#[cfg(not(hdf5_1_12_0))] -pub use self::H5Sencode1 as H5Sencode; -#[cfg(hdf5_1_12_0)] -pub use self::H5Sencode2 as H5Sencode; - -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5Sis_regular_hyperslab(spaceid: hid_t) -> htri_t; pub fn H5Sget_regular_hyperslab( @@ -112,7 +115,7 @@ extern "C" { ) -> htri_t; } -#[cfg(hdf5_1_12_0)] +#[cfg(any(feature = "1.12.0", feature = "1.10.7"))] extern "C" { pub fn H5Scombine_hyperslab( space_id: hid_t, op: H5S_seloper_t, start: *const hsize_t, stride: *const hsize_t, @@ -120,12 +123,6 @@ extern "C" { ) -> hid_t; pub fn H5Scombine_select(space1_id: hid_t, op: H5S_seloper_t, space2_id: hid_t) -> hid_t; pub fn H5Smodify_select(space1_id: hid_t, op: H5S_seloper_t, space2_id: hid_t) -> herr_t; - pub fn H5Ssel_iter_close(sel_iter_id: hid_t) -> herr_t; - pub fn H5Ssel_iter_create(space_id: hid_t, elmt_size: size_t, flags: c_uint) -> hid_t; - pub fn H5Ssel_iter_get_seq_list( - sel_iter_id: hid_t, maxseq: size_t, maxbytes: size_t, nseq: *mut size_t, - nbytes: *mut size_t, off: *mut hsize_t, len: *mut size_t, - ) -> herr_t; pub fn H5Sselect_adjust(space_id: hid_t, offset: *const hssize_t) -> herr_t; pub fn H5Sselect_copy(dst_id: hid_t, src_id: hid_t) -> herr_t; pub fn H5Sselect_intersect_block( @@ -136,3 +133,18 @@ extern "C" { ) -> hid_t; pub fn H5Sselect_shape_same(space1_id: hid_t, space2_id: hid_t) -> htri_t; } + +#[cfg(feature = "1.12.0")] +extern "C" { + pub fn H5Ssel_iter_close(sel_iter_id: hid_t) -> herr_t; + pub fn H5Ssel_iter_create(space_id: hid_t, elmt_size: size_t, flags: c_uint) -> hid_t; + pub fn H5Ssel_iter_get_seq_list( + sel_iter_id: hid_t, maxseq: size_t, maxbytes: size_t, nseq: *mut size_t, + nbytes: *mut size_t, off: *mut hsize_t, len: *mut size_t, + ) -> herr_t; +} + +#[cfg(feature = "1.12.1")] +extern "C" { + pub fn H5Ssel_iter_reset(sel_iter_id: hid_t, space_id: hid_t) -> hid_t; +} diff --git 
a/hdf5-sys/src/h5t.rs b/hdf5-sys/src/h5t.rs index 0636098cc..97ed1f15e 100644 --- a/hdf5-sys/src/h5t.rs +++ b/hdf5-sys/src/h5t.rs @@ -1,3 +1,4 @@ +//! Creating and manipulating datatypes which describe elements of a dataset use std::mem; pub use self::H5T_bkg_t::*; @@ -13,6 +14,10 @@ pub use self::H5T_pad_t::*; pub use self::H5T_pers_t::*; pub use self::H5T_sign_t::*; pub use self::H5T_str_t::*; +pub use { + H5Tarray_create2 as H5Tarray_create, H5Tcommit2 as H5Tcommit, + H5Tget_array_dims2 as H5Tget_array_dims, H5Topen2 as H5Topen, +}; use crate::internal_prelude::*; @@ -34,7 +39,7 @@ pub enum H5T_class_t { H5T_NCLASSES = 11, } -#[cfg(hdf5_1_8_6)] +#[cfg(feature = "1.8.6")] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5T_order_t { @@ -46,7 +51,7 @@ pub enum H5T_order_t { H5T_ORDER_NONE = 4, } -#[cfg(not(hdf5_1_8_6))] +#[cfg(not(feature = "1.8.6"))] #[repr(C)] #[derive(Copy, Clone, PartialEq, PartialOrd, Debug)] pub enum H5T_order_t { @@ -99,7 +104,7 @@ pub enum H5T_cset_t { impl Default for H5T_cset_t { fn default() -> Self { - H5T_cset_t::H5T_CSET_ASCII + Self::H5T_CSET_ASCII } } @@ -332,6 +337,16 @@ extern "C" { src_id: hid_t, dst_id: hid_t, nelmts: size_t, buf: *mut c_void, background: *mut c_void, plist_id: hid_t, ) -> herr_t; + #[deprecated(note = "deprecated since HDF5 1.8.0, use H5Tcommit2")] + pub fn H5Tcommit1(loc_id: hid_t, name: *const c_char, type_id: hid_t) -> herr_t; + #[deprecated(note = "deprecated since HDF5 1.8.0, use H5Tcommit2")] + pub fn H5Topen1(loc_id: hid_t, name: *const c_char) -> hid_t; + #[deprecated(note = "deprecated since HDF5 1.8.0, use H5Tarray_create2")] + pub fn H5Tarray_create1( + base_id: hid_t, ndims: c_int, dim: *const hsize_t, perm: *const c_int, + ) -> hid_t; + #[deprecated(note = "deprecated since HDF5 1.8.0, use H5Tget_array_dims2")] + pub fn H5Tget_array_dims1(type_id: hid_t, dims: *mut hsize_t, perm: *mut c_int) -> c_int; } pub use self::globals::*; @@ -426,7 +441,7 @@ mod globals { 
extern_static!(H5T_NATIVE_UINT_LEAST64, H5T_NATIVE_UINT_LEAST64_g); extern_static!(H5T_NATIVE_INT_FAST64, H5T_NATIVE_INT_FAST64_g); extern_static!(H5T_NATIVE_UINT_FAST64, H5T_NATIVE_UINT_FAST64_g); - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] extern_static!(H5T_STD_REF, H5T_STD_REF_g); } @@ -521,17 +536,34 @@ mod globals { extern_static!(H5T_NATIVE_UINT_LEAST64, __imp_H5T_NATIVE_UINT_LEAST64_g); extern_static!(H5T_NATIVE_INT_FAST64, __imp_H5T_NATIVE_INT_FAST64_g); extern_static!(H5T_NATIVE_UINT_FAST64, __imp_H5T_NATIVE_UINT_FAST64_g); - #[cfg(hdf5_1_12_0)] + #[cfg(feature = "1.12.0")] extern_static!(H5T_STD_REF, __imp_H5T_STD_REF_g); } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] extern "C" { pub fn H5Tflush(type_id: hid_t) -> herr_t; pub fn H5Trefresh(type_id: hid_t) -> herr_t; } -#[cfg(hdf5_1_12_0)] +#[cfg(feature = "1.12.0")] extern "C" { pub fn H5Treclaim(type_id: hid_t, space_id: hid_t, dxpl_id: hid_t, buf: *mut c_void) -> herr_t; } + +#[cfg(feature = "1.13.0")] +extern "C" { + pub fn H5Tclose_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, type_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Tcommit_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, type_id: hid_t, lcpl_id: hid_t, tcpl_id: hid_t, tapl_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5Topen_async( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, tapl_id: hid_t, es_id: hid_t, + ) -> hid_t; +} diff --git a/hdf5-sys/src/h5vl.rs b/hdf5-sys/src/h5vl.rs index 5119b0226..c3d582c38 100644 --- a/hdf5-sys/src/h5vl.rs +++ b/hdf5-sys/src/h5vl.rs @@ -1,11 +1,52 @@ -#![cfg(hdf5_1_12_0)] +//! 
Using the Virtual Object Layer +#![cfg(feature = "1.12.0")] + use crate::internal_prelude::*; pub type H5VL_class_value_t = c_int; // Incomplete type +#[cfg(all(feature = "1.12.0", not(feature = "1.13.0")))] pub type H5VL_class_t = c_void; +#[repr(C)] +#[derive(Debug, Copy, Clone)] +#[cfg(feature = "1.13.0")] +pub struct H5VL_class_t { + pub version: c_uint, + pub value: H5VL_class_value_t, + pub name: *const c_char, + pub conn_version: c_uint, + pub cap_flags: c_uint, + pub initialize: Option herr_t>, + pub terminate: Option herr_t>, + + pub info_cls: H5VL_info_class_t, + pub wrap_cls: H5VL_wrap_class_t, + + pub attr_cls: H5VL_attr_class_t, + pub dataset_cls: H5VL_dataset_class_t, + pub datatype_cls: H5VL_datatype_class_t, + pub file_cl: H5VL_file_class_t, + pub group_cls: H5VL_group_class_t, + pub link_cls: H5VL_link_class_t, + pub object_cls: H5VL_object_class_t, + + pub introspect_cls: H5VL_introspect_class_t, + pub request_cls: H5VL_request_class_t, + pub blob_cls: H5VL_blob_class_t, + pub token_cls: H5VL_token_class_t, + + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, +} + extern "C" { pub fn H5VLclose(connector_id: hid_t) -> herr_t; pub fn H5VLget_connector_id(obj_id: hid_t) -> hid_t; @@ -21,3 +62,1870 @@ extern "C" { ) -> hid_t; pub fn H5VLunregister_connector(vol_id: hid_t) -> herr_t; } + +#[cfg(feature = "1.12.1")] +#[repr(C)] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum H5VL_subclass_t { + H5VL_SUBCLS_NONE, + H5VL_SUBCLS_INFO, + H5VL_SUBCLS_WRAP, + H5VL_SUBCLS_ATTR, + H5VL_SUBCLS_DATASET, + H5VL_SUBCLS_DATATYPE, + H5VL_SUBCLS_FILE, + H5VL_SUBCLS_GROUP, + H5VL_SUBCLS_LINK, + H5VL_SUBCLS_OBJECT, + H5VL_SUBCLS_REQUEST, + H5VL_SUBCLS_BLOB, + H5VL_SUBCLS_TOKEN, +} + +#[cfg(feature = "1.12.1")] +extern "C" { + pub fn H5VLquery_optional( + obj_id: hid_t, subcls: H5VL_subclass_t, opt_type: c_int, supported: *mut hbool_t, + ) -> herr_t; +} + 
+#[cfg(feature = "1.13.0")] +pub use v1_13_0::*; +#[cfg(feature = "1.13.0")] +mod v1_13_0 { + use std::fmt::{self, Debug}; + use std::mem::ManuallyDrop; + + use crate::{ + h5a::{H5A_info_t, H5A_operator2_t}, + h5d::H5D_space_status_t, + h5f::H5F_scope_t, + h5g::H5G_info_t, + h5i::H5I_type_t, + h5l::{H5L_info2_t, H5L_iterate2_t, H5L_type_t}, + h5o::{H5O_info2_t, H5O_iterate2_t, H5O_token_t, H5O_type_t}, + }; + + use super::*; + + macro_rules! impl_debug_args { + ($ty:ty, $tag:ident, $args:ty, {$($variant:ident => $func:expr),+$(,)*}) => { + #[allow(unreachable_patterns)] + impl std::fmt::Debug for $ty { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut s = f.debug_struct(stringify!($ty)); + s.field("op_type", &self.op_type); + match self.op_type { + $($tag::$variant => { + s.field("args", &($func as fn($args) -> _)(self.args)); + })+ + _ => {} + } + s.finish() + } + } + }; + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_info_class_t { + pub size: size_t, + pub copy: Option *mut c_void>, + pub cmp: Option< + extern "C" fn( + cmp_value: *mut c_int, + info1: *const c_void, + info2: *const c_void, + ) -> herr_t, + >, + pub free: Option herr_t>, + pub to_str: Option herr_t>, + pub from_str: Option herr_t>, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_wrap_class_t { + pub get_object: Option *mut c_void>, + pub get_wrap_ctx: + Option herr_t>, + pub wrap_object: Option< + extern "C" fn( + obj: *mut c_void, + obj_type: H5I_type_t, + wrap_ctx: *mut c_void, + ) -> *mut c_void, + >, + pub unwrap_object: Option *mut c_void>, + pub free_wrap_ctx: Option herr_t>, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_attr_get_t { + H5VL_ATTR_GET_ACPL, + H5VL_ATTR_GET_INFO, + H5VL_ATTR_GET_NAME, + H5VL_ATTR_GET_SPACE, + H5VL_ATTR_GET_STORAGE_SIZE, + H5VL_ATTR_GET_TYPE, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_args_t_union_get_acpl { + pub acpl_id: hid_t, 
+ } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_args_t_union_get_space { + pub space_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_args_t_union_get_storage_size { + pub data_size: *mut hsize_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_args_t_union_get_type { + pub type_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_name_args_t { + pub loc_params: H5VL_loc_params_t, + pub buf_size: size_t, + pub buf: *mut c_char, + pub attr_name_len: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_get_info_args_t { + pub loc_params: H5VL_loc_params_t, + pub attr_name: *const c_char, + pub ainfo: *mut H5A_info_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_attr_get_args_t_union { + pub get_acpl: ManuallyDrop, + pub get_info: ManuallyDrop, + pub get_name: ManuallyDrop, + pub get_space: ManuallyDrop, + pub get_storage_size: ManuallyDrop, + pub get_type: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_attr_get_args_t { + pub op_type: H5VL_attr_get_t, + pub args: H5VL_attr_get_args_t_union, + } + + impl_debug_args!( + H5VL_attr_get_args_t, + H5VL_attr_get_t, + H5VL_attr_get_args_t_union, + { + H5VL_ATTR_GET_ACPL => |args| unsafe { args.get_acpl }, + H5VL_ATTR_GET_INFO => |args| unsafe { args.get_info }, + H5VL_ATTR_GET_NAME => |args| unsafe { args.get_name }, + H5VL_ATTR_GET_SPACE => |args| unsafe { args.get_space }, + H5VL_ATTR_GET_STORAGE_SIZE => |args| unsafe { args.get_storage_size }, + H5VL_ATTR_GET_TYPE => |args| unsafe { args.get_type }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_attr_specific_t { + H5VL_ATTR_DELETE, + H5VL_ATTR_DELETE_BY_IDX, + H5VL_ATTR_EXISTS, + H5VL_ATTR_ITER, + H5VL_ATTR_RENAME, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_specific_args_t_union_del { + pub 
name: *const c_char, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_specific_args_t_union_exists { + pub name: *const c_char, + pub exists: *mut hbool_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_specific_args_t_union_rename { + pub old_name: *const c_char, + pub new_name: *const c_char, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_iterate_args_t { + pub idx_type: H5_index_t, + pub order: H5_iter_order_t, + pub idx: *mut hsize_t, + pub op: H5A_operator2_t, + pub op_data: *mut c_void, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_delete_by_idx_args_t { + pub idx_type: H5_index_t, + pub order: H5_iter_order_t, + pub n: hsize_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_attr_specific_args_t_union { + pub del: ManuallyDrop, + pub delete_by_idx: ManuallyDrop, + pub exists: ManuallyDrop, + pub iterate: ManuallyDrop, + pub rename: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_attr_specific_args_t { + pub op_type: H5VL_attr_specific_t, + pub args: H5VL_attr_specific_args_t_union, + } + + impl_debug_args!( + H5VL_attr_specific_args_t, + H5VL_attr_specific_t, + H5VL_attr_specific_args_t_union, + { + H5VL_ATTR_DELETE => |args| unsafe { args.del }, + H5VL_ATTR_DELETE_BY_IDX => |args| unsafe { args.delete_by_idx }, + H5VL_ATTR_EXISTS => |args| unsafe { args.exists }, + H5VL_ATTR_ITER => |args| unsafe { args.iterate }, + H5VL_ATTR_RENAME => |args| unsafe { args.rename }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_attr_class_t { + pub create: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + attr_name: *const c_char, + type_id: hid_t, + space_id: hid_t, + acpl_id: hid_t, + aapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub open: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, 
+ attr_name: *const c_char, + aapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub read: Option< + extern "C" fn( + attr: *mut c_void, + mem_type_id: hid_t, + buf: *mut c_void, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub write: Option< + extern "C" fn( + attr: *mut c_void, + mem_type_id: hid_t, + buf: *const c_void, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_attr_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_attr_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub close: Option< + extern "C" fn(attr: *mut c_void, dxpl_id: hid_t, req: *mut *mut c_void) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_dataset_specific_t { + H5VL_DATASET_SET_EXTENT, + H5VL_DATASET_FLUSH, + H5VL_DATASET_REFRESH, + } + + #[repr(C)] + #[derive(Copy, Clone, Debug)] + pub struct H5VL_dataset_specific_args_t_union_set_extent { + pub size: *const hsize_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_specific_args_t_union_flush { + pub dset_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_specific_args_t_union_refresh { + pub dset_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_dataset_specific_args_t_union { + pub set_extent: ManuallyDrop, + pub flush: ManuallyDrop, + pub refresh: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_dataset_specific_args_t { + pub op_type: H5VL_dataset_specific_t, + pub args: 
H5VL_dataset_specific_args_t_union, + } + + impl_debug_args!( + H5VL_dataset_specific_args_t, + H5VL_dataset_specific_t, + H5VL_dataset_specific_args_t_union, + { + H5VL_DATASET_SET_EXTENT => |args| unsafe { args.set_extent }, + H5VL_DATASET_FLUSH => |args| unsafe { args.flush }, + H5VL_DATASET_REFRESH => |args| unsafe { args.refresh }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_dataset_get_t { + H5VL_DATASET_GET_DAPL, + H5VL_DATASET_GET_DCPL, + H5VL_DATASET_GET_SPACE, + H5VL_DATASET_GET_SPACE_STATUS, + H5VL_DATASET_GET_STORAGE_SIZE, + H5VL_DATASET_GET_TYPE, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_dapl { + pub dapl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_dcpl { + pub dcpl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_space { + pub space_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_space_status { + pub status: *mut H5D_space_status_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_storage_size { + pub storage_size: *mut hsize_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_get_args_t_union_get_type { + pub type_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_dataset_get_args_t_union { + pub get_dapl: ManuallyDrop, + pub get_dcpl: ManuallyDrop, + pub get_space: ManuallyDrop, + pub get_space_status: ManuallyDrop, + pub get_storage_size: ManuallyDrop, + pub get_type: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_dataset_get_args_t { + pub op_type: H5VL_dataset_get_t, + pub args: H5VL_dataset_get_args_t_union, + } + + impl_debug_args!( + H5VL_dataset_get_args_t, + H5VL_dataset_get_t, + H5VL_dataset_get_args_t_union, + { + H5VL_DATASET_GET_DAPL 
=> |args| unsafe { args.get_dapl }, + H5VL_DATASET_GET_DCPL => |args| unsafe { args.get_dcpl }, + H5VL_DATASET_GET_SPACE => |args| unsafe { args.get_space }, + H5VL_DATASET_GET_SPACE_STATUS => |args| unsafe { args.get_space_status }, + H5VL_DATASET_GET_STORAGE_SIZE => |args| unsafe { args.get_storage_size }, + H5VL_DATASET_GET_TYPE => |args| unsafe { args.get_type }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_dataset_class_t { + pub create: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + name: *const c_char, + lcpl_id: hid_t, + type_id: hid_t, + space_id: hid_t, + dcpl_id: hid_t, + dapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub open: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + name: *const c_char, + dapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub read: Option< + extern "C" fn( + dset: *mut c_void, + mem_type_id: hid_t, + mem_space_id: hid_t, + file_space_id: hid_t, + dxpl_id: hid_t, + buf: *mut c_void, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub write: Option< + extern "C" fn( + dset: *mut c_void, + mem_type_id: hid_t, + mem_space_id: hid_t, + file_space_id: hid_t, + dxpl_id: hid_t, + buf: *const c_void, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_dataset_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_dataset_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub close: Option< + extern "C" fn(dset: *mut c_void, dxpl_id: hid_t, req: *mut *mut c_void) -> herr_t, + >, + } + + #[repr(C)] + 
#[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_file_specific_t { + H5VL_FILE_FLUSH, + H5VL_FILE_REOPEN, + H5VL_FILE_IS_ACCESSIBLE, + H5VL_FILE_DELETE, + H5VL_FILE_IS_EQUAL, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_specific_args_t_union_flush { + pub type_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_specific_args_t_union_refresh { + pub type_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_datatype_specific_args_t_union { + pub flush: ManuallyDrop, + pub refresh: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_datatype_specific_t { + H5VL_DATATYPE_FLUSH, + H5VL_DATATYPE_REFRESH, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_datatype_specific_args_t { + pub op_type: H5VL_datatype_specific_t, + pub args: H5VL_datatype_specific_args_t_union, + } + + impl_debug_args!( + H5VL_datatype_specific_args_t, + H5VL_datatype_specific_t, + H5VL_datatype_specific_args_t_union, + { + H5VL_DATATYPE_FLUSH => |args| unsafe { args.flush }, + H5VL_DATATYPE_REFRESH => |args| unsafe { args.refresh }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_datatype_get_t { + H5VL_DATATYPE_GET_BINARY_SIZE, + H5VL_DATATYPE_GET_BINARY, + H5VL_DATATYPE_GET_TCPL, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_get_args_t_union_get_binary_size { + pub size: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_get_args_t_union_get_binary { + pub buf: *mut c_void, + pub buf_size: size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_get_args_t_union_get_tcpl { + pub tcpl_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_datatype_get_args_t_union { + pub get_binary_size: ManuallyDrop, + pub get_binary: ManuallyDrop, + pub get_tcpl: ManuallyDrop, + } + + #[repr(C)] + 
#[derive(Copy, Clone)] + pub struct H5VL_datatype_get_args_t { + pub op_type: H5VL_datatype_get_t, + pub args: H5VL_datatype_get_args_t_union, + } + + impl_debug_args!( + H5VL_datatype_get_args_t, + H5VL_datatype_get_t, + H5VL_datatype_get_args_t_union, + { + H5VL_DATATYPE_GET_BINARY_SIZE => |args| unsafe { args.get_binary_size }, + H5VL_DATATYPE_GET_BINARY => |args| unsafe { args.get_binary }, + H5VL_DATATYPE_GET_TCPL => |args| unsafe { args.get_tcpl }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_datatype_class_t { + pub commit: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + name: *const c_char, + type_id: hid_t, + lcpl_id: hid_t, + tcpl_id: hid_t, + tapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub open: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + name: *const c_char, + tapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_datatype_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_datatype_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub close: + Option herr_t>, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_specific_args_t_union_flush { + pub obj_type: H5I_type_t, + pub scope: H5F_scope_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_specific_args_t_union_reopen { + pub file: *mut *mut c_void, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_specific_args_t_union_is_accessible { + pub filename: *const c_char, + pub fapl_id: hid_t, + pub 
accessible: *mut hbool_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_specific_args_t_union_del { + pub filename: *const c_char, + pub fapl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_specific_args_t_union_is_equal { + pub obj2: *mut c_void, + pub same_file: *mut hbool_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_file_specific_args_t_union { + pub flush: ManuallyDrop, + pub reopen: ManuallyDrop, + pub is_accessible: ManuallyDrop, + pub del: ManuallyDrop, + pub is_equal: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_file_specific_args_t { + pub op_type: H5VL_file_specific_t, + pub args: H5VL_file_specific_args_t_union, + } + + impl_debug_args!( + H5VL_file_specific_args_t, + H5VL_file_specific_t, + H5VL_file_specific_args_t_union, + { + H5VL_FILE_FLUSH => |args| unsafe { args.flush }, + H5VL_FILE_REOPEN => |args| unsafe { args.reopen }, + H5VL_FILE_IS_ACCESSIBLE => |args| unsafe { args.is_accessible }, + H5VL_FILE_DELETE => |args| unsafe { args.del }, + H5VL_FILE_IS_EQUAL => |args| unsafe { args.is_equal }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_cont_info_t { + pub version: c_uint, + pub feature_flags: u64, + pub token_size: size_t, + pub blob_id_size: size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_cont_info { + pub info: *mut H5VL_file_cont_info_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_fapl { + pub fapl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_fcpl { + pub fcpl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_fileno { + pub fileno: *mut c_ulong, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_intent { + pub flags: *mut c_uint, + } 
+ + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_args_t_union_get_obj_count { + pub types: c_uint, + pub count: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_obj_ids_args_t { + pub types: c_uint, + pub max_objs: size_t, + pub old_list: *mut hid_t, + pub count: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_file_get_name_args_t { + pub r#type: H5I_type_t, + pub buf_size: size_t, + pub buf: *mut c_char, + pub file_name_len: *mut size_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_file_get_args_t_union { + pub get_cont_info: ManuallyDrop, + pub get_fapl: ManuallyDrop, + pub get_fcpl: ManuallyDrop, + pub get_fileno: ManuallyDrop, + pub get_intent: ManuallyDrop, + pub get_name: ManuallyDrop, + pub get_obj_count: ManuallyDrop, + pub get_obj_ids: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_file_get_t { + H5VL_FILE_GET_CONT_INFO, + H5VL_FILE_GET_FAPL, + H5VL_FILE_GET_FCPL, + H5VL_FILE_GET_FILENO, + H5VL_FILE_GET_INTENT, + H5VL_FILE_GET_NAME, + H5VL_FILE_GET_OBJ_COUNT, + H5VL_FILE_GET_OBJ_IDS, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_file_get_args_t { + pub op_type: H5VL_file_get_t, + pub args: H5VL_file_get_args_t_union, + } + + impl_debug_args!( + H5VL_file_get_args_t, + H5VL_file_get_t, + H5VL_file_get_args_t_union, + { + H5VL_FILE_GET_CONT_INFO => |args| unsafe { args.get_cont_info }, + H5VL_FILE_GET_FAPL => |args| unsafe { args.get_fapl }, + H5VL_FILE_GET_FCPL => |args| unsafe { args.get_fcpl }, + H5VL_FILE_GET_FILENO => |args| unsafe { args.get_fileno }, + H5VL_FILE_GET_INTENT => |args| unsafe { args.get_intent }, + H5VL_FILE_GET_NAME => |args| unsafe { args.get_name }, + H5VL_FILE_GET_OBJ_COUNT => |args| unsafe { args.get_obj_count }, + H5VL_FILE_GET_OBJ_IDS => |args| unsafe { args.get_obj_ids }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct 
H5VL_file_class_t { + pub create: Option< + extern "C" fn( + name: *const c_char, + flags: c_uint, + fcpl_id: hid_t, + fapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub open: Option< + extern "C" fn( + name: *const c_char, + flags: c_uint, + fapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_file_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_file_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub close: Option< + extern "C" fn(file: *mut c_void, dxpl_id: hid_t, req: *mut *mut c_void) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_group_specific_t { + H5VL_GROUP_MOUNT, + H5VL_GROUP_UNMOUNT, + H5VL_GROUP_FLUSH, + H5VL_GROUP_REFRESH, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_spec_mount_args_t { + pub name: *const c_char, + pub child_file: *mut c_void, + pub fmpl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_specific_args_t_union_unmount { + pub name: *const c_char, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_specific_args_t_union_flush { + pub grp_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_specific_args_t_union_refresh { + pub grp_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_group_specific_args_t_union { + pub mount: ManuallyDrop, + pub unmount: ManuallyDrop, + pub flush: ManuallyDrop, + pub refresh: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_group_specific_args_t { + pub 
op_type: H5VL_group_specific_t, + pub args: H5VL_group_specific_args_t_union, + } + + impl_debug_args!( + H5VL_group_specific_args_t, + H5VL_group_specific_t, + H5VL_group_specific_args_t_union, + { + H5VL_GROUP_MOUNT => |args| unsafe { args.mount }, + H5VL_GROUP_UNMOUNT => |args| unsafe { args.unmount }, + H5VL_GROUP_FLUSH => |args| unsafe { args.flush }, + H5VL_GROUP_REFRESH => |args| unsafe { args.refresh }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_get_info_args_t { + pub loc_params: H5VL_loc_params_t, + pub ginfo: *mut H5G_info_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_get_args_t_union_get_gcpl { + pub gcpl_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_group_get_args_t_union { + pub get_gcpl: ManuallyDrop, + pub get_info: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_group_get_t { + H5VL_GROUP_GET_GCPL, + H5VL_GROUP_GET_INFO, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_group_get_args_t { + pub op_type: H5VL_group_get_t, + pub args: H5VL_group_get_args_t_union, + } + + impl_debug_args!( + H5VL_group_get_args_t, + H5VL_group_get_t, + H5VL_group_get_args_t_union, + { + H5VL_GROUP_GET_GCPL => |args| unsafe { args.get_gcpl }, + H5VL_GROUP_GET_INFO => |args| unsafe { args.get_info }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_group_class_t { + pub create: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + name: *const c_char, + lcpl_id: hid_t, + gcpl_id: hid_t, + gapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub open: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + + name: *const c_char, + gapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut 
H5VL_group_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_group_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub close: Option< + extern "C" fn(grp: *mut c_void, dxpl_id: hid_t, req: *mut *mut c_void) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_iterate_args_t { + pub recursive: hbool_t, + pub idx_type: H5_index_t, + pub order: H5_iter_order_t, + pub idx_p: *mut hsize_t, + pub op: H5L_iterate2_t, + pub op_data: *mut c_void, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_specific_args_t_union_exists { + pub exists: *mut hbool_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_link_specific_args_t_union { + pub exists: ManuallyDrop, + pub iterate: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_link_specific_t { + H5VL_LINK_DELETE, + H5VL_LINK_EXISTS, + H5VL_LINK_ITER, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_link_specific_args_t { + pub op_type: H5VL_link_specific_t, + pub args: H5VL_link_specific_args_t_union, + } + + impl_debug_args!( + H5VL_link_specific_args_t, + H5VL_link_specific_t, + H5VL_link_specific_args_t_union, + { + H5VL_LINK_EXISTS => |args| unsafe { args.exists }, + H5VL_LINK_ITER => |args| unsafe { args.iterate }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_get_args_t_union_get_info { + pub linfo: *mut H5L_info2_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_get_args_t_union_get_name { + pub name_size: size_t, + pub name: *mut c_char, + pub name_len: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct 
H5VL_link_get_args_t_union_get_val { + pub buf_size: size_t, + pub buf: *mut c_void, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_link_get_args_t_union { + pub get_info: ManuallyDrop, + pub get_name: ManuallyDrop, + pub get_val: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_link_get_t { + H5VL_LINK_GET_INFO, + H5VL_LINK_GET_NAME, + H5VL_LINK_GET_VAL, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_link_get_args_t { + pub op_type: H5VL_link_get_t, + pub args: H5VL_link_get_args_t_union, + } + + impl_debug_args!( + H5VL_link_get_args_t, + H5VL_link_get_t, + H5VL_link_get_args_t_union, + { + H5VL_LINK_GET_INFO => |args| unsafe { args.get_info }, + H5VL_LINK_GET_NAME => |args| unsafe { args.get_name }, + H5VL_LINK_GET_VAL => |args| unsafe { args.get_val }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_link_create_t { + H5VL_LINK_CREATE_HARD, + H5VL_LINK_CREATE_SOFT, + H5VL_LINK_CREATE_UD, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_create_args_t_union_hard { + pub curr_obj: *mut c_void, + pub curr_loc_params: H5VL_loc_params_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_create_args_t_union_soft { + pub target: *const c_char, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_create_args_t_union_ud { + pub r#type: H5L_type_t, + pub buf: *const c_void, + pub buf_size: size_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_link_create_args_t_union { + pub hard: ManuallyDrop, + pub soft: ManuallyDrop, + pub ud: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_link_create_args_t { + pub op_type: H5VL_link_create_t, + pub args: H5VL_link_create_args_t_union, + } + + impl_debug_args!( + H5VL_link_create_args_t, + H5VL_link_create_t, + H5VL_link_create_args_t_union, + { + H5VL_LINK_CREATE_HARD => |args| unsafe { args.hard }, + 
H5VL_LINK_CREATE_SOFT => |args| unsafe { args.soft }, + H5VL_LINK_CREATE_UD => |args| unsafe { args.ud }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_link_class_t { + pub create: Option< + extern "C" fn( + args: *mut H5VL_link_create_args_t, + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + lcpl_id: hid_t, + lapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub copy: Option< + extern "C" fn( + src_obj: *mut c_void, + loc_params1: *const H5VL_loc_params_t, + dest_obj: *mut c_void, + loc_params2: *const H5VL_loc_params_t, + lcpl_id: hid_t, + lapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub r#move: Option< + extern "C" fn( + src_obj: *mut c_void, + loc_params1: *const H5VL_loc_params_t, + dest_obj: *mut c_void, + loc_params2: *const H5VL_loc_params_t, + lcpl_id: hid_t, + lapl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_link_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_link_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_visit_args_t { + pub idx_type: H5_index_t, + pub order: H5_iter_order_t, + pub fields: c_uint, + pub op: H5O_iterate2_t, + pub op_data: *mut c_void, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_specific_args_t_union_change_rc { + pub delta: c_int, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct 
H5VL_object_specific_args_t_union_exists { + pub exists: *mut hbool_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_specific_args_t_union_lookup { + pub token_ptr: *mut H5O_token_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_specific_args_t_union_flush { + pub obj_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_specific_args_t_union_refresh { + pub obj_id: hid_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_object_specific_args_t_union { + pub change_rc: ManuallyDrop, + pub exists: ManuallyDrop, + pub lookup: ManuallyDrop, + pub visit: ManuallyDrop, + pub flush: ManuallyDrop, + pub refresh: ManuallyDrop, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_object_specific_t { + H5VL_OBJECT_CHANGE_REF_COUNT, + H5VL_OBJECT_EXISTS, + H5VL_OBJECT_LOOKUP, + H5VL_OBJECT_VISIT, + H5VL_OBJECT_FLUSH, + H5VL_OBJECT_REFRESH, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_object_specific_args_t { + pub op_type: H5VL_object_specific_t, + pub args: H5VL_object_specific_args_t_union, + } + + impl_debug_args!( + H5VL_object_specific_args_t, + H5VL_object_specific_t, + H5VL_object_specific_args_t_union, + { + H5VL_OBJECT_CHANGE_REF_COUNT => |args| unsafe { args.change_rc }, + H5VL_OBJECT_EXISTS => |args| unsafe { args.exists }, + H5VL_OBJECT_LOOKUP => |args| unsafe { args.lookup }, + H5VL_OBJECT_VISIT => |args| unsafe { args.visit }, + H5VL_OBJECT_FLUSH => |args| unsafe { args.flush }, + H5VL_OBJECT_REFRESH => |args| unsafe { args.refresh }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_object_get_t { + H5VL_OBJECT_GET_FILE, + H5VL_OBJECT_GET_NAME, + H5VL_OBJECT_GET_TYPE, + H5VL_OBJECT_GET_INFO, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_get_args_t_union_get_file { + pub file: *mut *mut c_void, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + 
pub struct H5VL_object_get_args_t_union_get_name { + pub buf_size: size_t, + pub buf: *mut c_char, + pub name_len: *mut size_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_get_args_t_union_get_type { + pub obj_type: *mut H5O_type_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_get_args_t_union_get_info { + pub fields: c_uint, + pub oinfo: *mut H5O_info2_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_object_get_args_t_union { + pub get_file: ManuallyDrop, + pub get_name: ManuallyDrop, + pub get_type: ManuallyDrop, + pub get_info: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_object_get_args_t { + pub op_type: H5VL_object_get_t, + pub args: H5VL_object_get_args_t_union, + } + + impl_debug_args!( + H5VL_object_get_args_t, + H5VL_object_get_t, + H5VL_object_get_args_t_union, + { + H5VL_OBJECT_GET_FILE => |args| unsafe { args.get_file }, + H5VL_OBJECT_GET_NAME => |args| unsafe { args.get_name }, + H5VL_OBJECT_GET_TYPE => |args| unsafe { args.get_type }, + H5VL_OBJECT_GET_INFO => |args| unsafe { args.get_info }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_object_class_t { + pub open: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + opened_type: *mut H5I_type_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> *mut c_void, + >, + pub copy: Option< + extern "C" fn( + src_obj: *mut c_void, + loc_params1: *const H5VL_loc_params_t, + src_name: *const c_char, + dest_obj: *mut c_void, + loc_params2: *const H5VL_loc_params_t, + dst_name: *const c_char, + ocpypl_id: hid_t, + lcpl_id: hid_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_object_get_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, 
+ loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_object_specific_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + loc_params: *const H5VL_loc_params_t, + args: *mut H5VL_optional_args_t, + dxpl_id: hid_t, + req: *mut *mut c_void, + ) -> herr_t, + >, + } + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_get_conn_lvl_t { + H5VL_GET_CONN_LVL_CURR, + H5VL_GET_CONN_LVL_TERM, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_introspect_class_t { + pub get_conn_cls: Option< + extern "C" fn( + obj: *mut c_void, + lvl: H5VL_get_conn_lvl_t, + conn_cls: *const *const H5VL_class_t, + ) -> herr_t, + >, + pub get_cap_flags: + Option herr_t>, + pub opt_query: Option< + extern "C" fn( + obj: *mut c_void, + cls: H5VL_subclass_t, + opt_type: c_int, + flags: *mut u64, + ) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_request_status_t { + H5VL_REQUEST_STATUS_IN_PROGRESS, + H5VL_REQUEST_STATUS_SUCCEED, + H5VL_REQUEST_STATUS_FAIL, + H5VL_REQUEST_STATUS_CANT_CANCEL, + H5VL_REQUEST_STATUS_CANCELED, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_request_specific_t { + H5VL_REQUEST_GET_ERR_STACK, + H5VL_REQUEST_GET_EXEC_TIME, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_request_specific_args_t_union_get_err_stack { + pub err_stack_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_request_specific_args_t_union_get_exec_time { + pub exec_ts: *mut u64, + pub exec_time: *mut u64, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_request_specific_args_t_union { + pub get_err_stack: ManuallyDrop, + pub get_exec_time: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_request_specific_args_t { + pub op_type: H5VL_request_specific_t, + pub args: H5VL_request_specific_args_t_union, + } + + 
impl_debug_args!( + H5VL_request_specific_args_t, + H5VL_request_specific_t, + H5VL_request_specific_args_t_union, + { + H5VL_REQUEST_GET_ERR_STACK => |args| unsafe { args.get_err_stack }, + H5VL_REQUEST_GET_EXEC_TIME => |args| unsafe { args.get_exec_time }, + } + ); + + pub type H5VL_request_notify_t = + Option herr_t>; + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_request_class_t { + pub wait: Option< + extern "C" fn( + req: *mut c_void, + timeout: u64, + status: *mut H5VL_request_status_t, + ) -> herr_t, + >, + pub notify: Option< + extern "C" fn(req: *mut c_void, cb: H5VL_request_notify_t, ctx: *mut c_void) -> herr_t, + >, + pub cancel: + Option herr_t>, + pub specific: Option< + extern "C" fn(req: *mut c_void, args: *mut H5VL_request_specific_args_t) -> herr_t, + >, + pub optional: + Option herr_t>, + pub free: Option herr_t>, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_blob_specific_t { + H5VL_BLOB_DELETE, + H5VL_BLOB_ISNULL, + H5VL_BLOB_SETNULL, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_blob_specific_args_t_union_is_null { + pub isnull: *mut hbool_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_blob_specific_args_t_union { + pub is_null: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_blob_specific_args_t { + pub op_type: H5VL_blob_specific_t, + pub args: H5VL_blob_specific_args_t_union, + } + + impl_debug_args!( + H5VL_blob_specific_args_t, + H5VL_blob_specific_t, + H5VL_blob_specific_args_t_union, + { + H5VL_BLOB_ISNULL => |args| unsafe { args.is_null }, + } + ); + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_blob_class_t { + pub put: Option< + extern "C" fn( + obj: *mut c_void, + buf: *const c_void, + size: size_t, + blob_id: *mut c_void, + ctx: *mut c_void, + ) -> herr_t, + >, + pub get: Option< + extern "C" fn( + obj: *mut c_void, + blob_id: *const c_void, + buf: *mut c_void, + size: size_t, + ctx: *mut 
c_void, + ) -> herr_t, + >, + pub specific: Option< + extern "C" fn( + obj: *mut c_void, + blob_id: *mut c_void, + args: *mut H5VL_blob_specific_args_t, + ) -> herr_t, + >, + pub optional: Option< + extern "C" fn( + obj: *mut c_void, + blob_id: *mut c_void, + args: *mut H5VL_optional_args_t, + ) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_token_class_t { + pub cmp: Option< + extern "C" fn( + obj: *mut c_void, + token1: *const H5O_token_t, + token2: *const H5O_token_t, + cmp_value: *mut c_int, + ) -> herr_t, + >, + pub to_str: Option< + extern "C" fn( + obj: *mut c_void, + obj_type: H5I_type_t, + token: *const H5O_token_t, + token_str: *mut *mut c_char, + ) -> herr_t, + >, + pub from_str: Option< + extern "C" fn( + obj: *mut c_void, + obj_type: H5I_type_t, + token_str: *const c_char, + token: *mut H5O_token_t, + ) -> herr_t, + >, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] + pub enum H5VL_loc_type_t { + H5VL_OBJECT_BY_SELF, + H5VL_OBJECT_BY_NAME, + H5VL_OBJECT_BY_IDX, + H5VL_OBJECT_BY_TOKEN, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_loc_by_name_t { + pub name: *const c_char, + pub lapl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_loc_by_idx_t { + pub name: *const c_char, + pub idx_type: H5_index_t, + pub order: H5_iter_order_t, + pub n: hsize_t, + pub lapl_id: hid_t, + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_loc_by_token_t { + pub token: *mut H5O_token_t, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub union H5VL_loc_params_t_union { + pub loc_by_token: ManuallyDrop, + pub loc_by_name: ManuallyDrop, + pub loc_by_idx: ManuallyDrop, + } + + #[repr(C)] + #[derive(Copy, Clone)] + pub struct H5VL_loc_params_t { + pub obj_type: H5I_type_t, + pub type_: H5VL_loc_type_t, + pub loc_data: H5VL_loc_params_t_union, + } + + impl Debug for H5VL_loc_params_t { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let mut 
s = f.debug_struct("H5VL_lov_params_t"); + s.field("obj_type", &self.obj_type).field("type", &self.type_); + unsafe { + match self.type_ { + H5VL_loc_type_t::H5VL_OBJECT_BY_SELF => {} + H5VL_loc_type_t::H5VL_OBJECT_BY_NAME => { + s.field("loc_data", &self.loc_data.loc_by_name); + } + H5VL_loc_type_t::H5VL_OBJECT_BY_IDX => { + s.field("loc_data", &self.loc_data.loc_by_idx); + } + H5VL_loc_type_t::H5VL_OBJECT_BY_TOKEN => { + s.field("loc_data", &self.loc_data.loc_by_token); + } + } + }; + s.finish() + } + } + + #[repr(C)] + #[derive(Debug, Copy, Clone)] + pub struct H5VL_optional_args_t { + pub op_type: c_int, + pub args: *mut c_void, + } + + extern "C" { + pub fn H5VLattr_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, attr_id: hid_t, + args: *mut H5VL_optional_args_t, dxpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5VLdataset_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, dset_id: hid_t, + args: *mut H5VL_optional_args_t, dxpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5VLdatatype_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, type_id: hid_t, + args: *mut H5VL_optional_args_t, dxpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5VLfile_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, file_id: hid_t, + args: *mut H5VL_optional_args_t, dxpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5VLfind_opt_operation( + subcls: H5VL_subclass_t, op_name: *const c_char, op_val: *mut c_int, + ) -> herr_t; + pub fn H5VLgroup_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, group_id: hid_t, + args: *mut H5VL_optional_args_t, dxpl_id: hid_t, es_id: hid_t, + ) -> herr_t; + pub fn H5VLlink_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, lapl_id: hid_t, args: *mut H5VL_optional_args_t, dxpl_id: hid_t, + es_id: hid_t, + ) 
-> herr_t; + pub fn H5VLobject_optional_op( + app_file: *const c_char, app_func: *const c_char, app_line: c_uint, loc_id: hid_t, + name: *const c_char, lapl_id: hid_t, args: *mut H5VL_optional_args_t, dxpl_id: hid_t, + es_id: hid_t, + ) -> herr_t; + pub fn H5VLregister_opt_operation( + subcls: H5VL_subclass_t, op_name: *const c_char, op_val: *mut c_int, + ) -> herr_t; + pub fn H5VLrequest_optional_op( + req: *mut c_void, connector_id: hid_t, args: *mut H5VL_optional_args_t, + ) -> herr_t; + pub fn H5VLunregister_opt_operation( + subcls: H5VL_subclass_t, op_name: *const c_char, + ) -> herr_t; + } + + extern "C" { + pub fn H5VLfinish_lib_state() -> herr_t; + pub fn H5VLintrospect_get_cap_flags( + info: *const c_void, connector_id: hid_t, cap_flags: *mut c_uint, + ) -> herr_t; + pub fn H5VLstart_lib_state() -> herr_t; + } + + extern "C" { + pub fn H5VLobject_is_native(obj_id: hid_t, is_native: *mut hbool_t) -> herr_t; + } +} diff --git a/hdf5-sys/src/h5z.rs b/hdf5-sys/src/h5z.rs index 1620e757d..823a91599 100644 --- a/hdf5-sys/src/h5z.rs +++ b/hdf5-sys/src/h5z.rs @@ -1,8 +1,10 @@ +//! Configuring filters that process data during I/O operation use std::mem; pub use self::H5Z_EDC_t::*; pub use self::H5Z_SO_scale_type_t::*; pub use self::H5Z_cb_return_t::*; +pub use self::H5Z_class2_t as H5Z_class_t; use crate::internal_prelude::*; @@ -147,3 +149,14 @@ extern "C" { pub fn H5Zfilter_avail(id: H5Z_filter_t) -> htri_t; pub fn H5Zget_filter_info(filter: H5Z_filter_t, filter_config_flags: *mut c_uint) -> herr_t; } + +#[repr(C)] +#[derive(Debug, Copy, Clone)] +#[deprecated(note = "deprecated in HDF5 1.8.0, use H5Z_class2_t")] +pub struct H5Z_class1_t { + id: H5Z_filter_t, + name: *const c_char, + can_apply: H5Z_can_apply_func_t, + set_local: H5Z_set_local_func_t, + filter: H5Z_func_t, +} diff --git a/hdf5-sys/src/lib.rs b/hdf5-sys/src/lib.rs index 4686f796f..9dc672a73 100644 --- a/hdf5-sys/src/lib.rs +++ b/hdf5-sys/src/lib.rs @@ -1,7 +1,12 @@ +//! 
Rust bindings to the `hdf5` library for reading and writing data to and from storage #![allow(non_camel_case_types, non_snake_case, dead_code, deprecated)] #![cfg_attr(feature = "cargo-clippy", allow(clippy::unreadable_literal))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::missing_safety_doc))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::cognitive_complexity))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::upper_case_acronyms))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::wildcard_imports))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::module_name_repetitions))] +#![cfg_attr(docsrs, feature(doc_cfg))] macro_rules! extern_static { ($dest:ident, $src:ident) => { @@ -12,7 +17,7 @@ macro_rules! extern_static { }; } -#[cfg(all(feature = "mpio", not(h5_have_parallel)))] +#[cfg(all(feature = "mpio", not(feature = "have-parallel")))] compile_error!("Enabling \"mpio\" feature requires HDF5 library built with MPI support"); #[cfg(all(feature = "mpio", feature = "static"))] @@ -38,9 +43,12 @@ pub mod h5t; pub mod h5vl; pub mod h5z; -#[cfg(hdf5_1_8_15)] +#[cfg(feature = "1.8.15")] pub mod h5pl; +#[cfg(feature = "1.13.0")] +pub mod h5es; + #[allow(non_camel_case_types)] mod internal_prelude { pub use crate::h5::{ diff --git a/hdf5-types/Cargo.toml b/hdf5-types/Cargo.toml index 634df3c72..00a47f7e2 100644 --- a/hdf5-types/Cargo.toml +++ b/hdf5-types/Cargo.toml @@ -1,17 +1,26 @@ [package] name = "hdf5-types" -version = "0.7.0" # !V +version = "0.8.1" # !V authors = ["Ivan Smirnov "] keywords = ["hdf5"] license = "MIT OR Apache-2.0" description = "Native Rust equivalents of HDF5 types." 
repository = "https://github.com/aldanor/hdf5-rust" homepage = "https://github.com/aldanor/hdf5-rust" +readme = "README.md" edition = "2018" +build = "build.rs" +categories = ["encoding"] + +[features] +h5-alloc = [] [dependencies] ascii = "1.0" libc = "0.2" +hdf5-sys = { version = "0.8.1", path = "../hdf5-sys" } # !V +cfg-if = "1.0.0" [dev-dependencies] -quickcheck = { version = "0.9", default-features = false } +quickcheck = { version = "1.0", default-features = false } +unindent = "0.1" diff --git a/hdf5-types/README.md b/hdf5-types/README.md new file mode 100644 index 000000000..d81f8550c --- /dev/null +++ b/hdf5-types/README.md @@ -0,0 +1,3 @@ +# hdf5-types + +Create custom types for serializing to and from `hdf5` diff --git a/hdf5-types/build.rs b/hdf5-types/build.rs new file mode 100644 index 000000000..7419b5ebb --- /dev/null +++ b/hdf5-types/build.rs @@ -0,0 +1,6 @@ +fn main() { + println!("cargo:rerun-if-changed=build.rs"); + if std::env::var_os("DEP_HDF5_MSVC_DLL_INDIRECTION").is_some() { + println!("cargo:rustc-cfg=windows_dll"); + } +} diff --git a/hdf5-types/src/array.rs b/hdf5-types/src/array.rs index c377c108e..7eb300096 100644 --- a/hdf5-types/src/array.rs +++ b/hdf5-types/src/array.rs @@ -5,59 +5,6 @@ use std::ops::Deref; use std::ptr; use std::slice; -/* This trait is borrowed from arrayvec::Array (C) @bluss */ -pub unsafe trait Array: 'static { - type Item; - - fn as_ptr(&self) -> *const Self::Item; - fn as_mut_ptr(&mut self) -> *mut Self::Item; - fn capacity() -> usize; -} - -macro_rules! 
impl_array { - () => (); - - ($n:expr, $($ns:expr,)*) => ( - unsafe impl Array for [T; $n] { - type Item = T; - - #[inline(always)] - fn as_ptr(&self) -> *const T { - self as *const _ as *const _ - } - - #[inline(always)] - fn as_mut_ptr(&mut self) -> *mut T { - self as *mut _ as *mut _ - } - - #[inline(always)] - fn capacity() -> usize { - $n - } - } - - impl_array!($($ns,)*); - ); -} - -impl_array!( - 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, - 26, 27, 28, 29, 30, 31, -); -impl_array!( - 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, - 56, 57, 58, 59, 60, 61, 62, 63, -); -impl_array!( - 64, 70, 72, 80, 90, 96, 100, 110, 120, 128, 130, 140, 150, 160, 170, 180, 190, 192, 200, 210, - 220, 224, 230, 240, 250, -); -impl_array!( - 256, 300, 384, 400, 500, 512, 600, 700, 768, 800, 900, 1000, 1024, 2048, 4096, 8192, 16384, - 32768, -); - #[repr(C)] pub struct VarLenArray { len: usize, @@ -66,20 +13,20 @@ pub struct VarLenArray { } impl VarLenArray { - pub unsafe fn from_parts(p: *const T, len: usize) -> VarLenArray { + pub unsafe fn from_parts(p: *const T, len: usize) -> Self { let (len, ptr) = if !p.is_null() && len != 0 { - let dst = libc::malloc(len * mem::size_of::()); - ptr::copy_nonoverlapping(p, dst as *mut _, len); + let dst = crate::malloc(len * mem::size_of::()); + ptr::copy_nonoverlapping(p, dst.cast(), len); (len, dst) } else { (0, ptr::null_mut()) }; - VarLenArray { len, ptr: ptr as *const _, tag: PhantomData } + Self { len, ptr: ptr as *const _, tag: PhantomData } } #[inline] - pub fn from_slice(arr: &[T]) -> VarLenArray { - unsafe { VarLenArray::from_parts(arr.as_ptr(), arr.len()) } + pub fn from_slice(arr: &[T]) -> Self { + unsafe { Self::from_parts(arr.as_ptr(), arr.len()) } } #[inline] @@ -107,7 +54,7 @@ impl Drop for VarLenArray { fn drop(&mut self) { if !self.ptr.is_null() { unsafe { - libc::free(self.ptr as *mut _); + crate::free(self.ptr as *mut _); 
} self.ptr = ptr::null(); if self.len != 0 { @@ -119,8 +66,8 @@ impl Drop for VarLenArray { impl Clone for VarLenArray { #[inline] - fn clone(&self) -> VarLenArray { - VarLenArray::from_slice(&*self) + fn clone(&self) -> Self { + Self::from_slice(&*self) } } @@ -139,29 +86,29 @@ impl Deref for VarLenArray { impl<'a, T: Copy> From<&'a [T]> for VarLenArray { #[inline] - fn from(arr: &[T]) -> VarLenArray { - VarLenArray::from_slice(arr) + fn from(arr: &[T]) -> Self { + Self::from_slice(arr) } } -impl Into> for VarLenArray { +impl From> for Vec { #[inline] - fn into(self) -> Vec { - self.iter().cloned().collect() + fn from(v: VarLenArray) -> Self { + v.iter().copied().collect() } } -impl> From for VarLenArray { +impl From<[T; N]> for VarLenArray { #[inline] - fn from(arr: A) -> VarLenArray { - unsafe { VarLenArray::from_parts(arr.as_ptr(), A::capacity()) } + fn from(arr: [T; N]) -> Self { + unsafe { Self::from_parts(arr.as_ptr(), arr.len()) } } } impl Default for VarLenArray { #[inline] - fn default() -> VarLenArray { - unsafe { VarLenArray::from_parts(ptr::null(), 0) } + fn default() -> Self { + unsafe { Self::from_parts(ptr::null(), 0) } } } @@ -181,10 +128,10 @@ impl PartialEq<[T]> for VarLenArray { } } -impl> PartialEq for VarLenArray { +impl PartialEq<[T; N]> for VarLenArray { #[inline] - fn eq(&self, other: &A) -> bool { - self.as_slice() == unsafe { slice::from_raw_parts(other.as_ptr(), A::capacity()) } + fn eq(&self, other: &[T; N]) -> bool { + self.as_slice() == other } } @@ -197,19 +144,10 @@ impl fmt::Debug for VarLenArray { #[cfg(test)] pub mod tests { - use super::{Array, VarLenArray}; + use super::VarLenArray; type S = VarLenArray; - #[test] - pub fn test_array_trait() { - type T = [u32; 256]; - assert_eq!(::capacity(), 256); - let mut arr = [1, 2, 3]; - assert_eq!(arr.as_ptr(), &arr[0] as *const _); - assert_eq!(arr.as_mut_ptr(), &mut arr[0] as *mut _); - } - #[test] pub fn test_vla_empty_default() { assert_eq!(&*S::default(), &[]); diff --git 
a/hdf5-types/src/dyn_value.rs b/hdf5-types/src/dyn_value.rs new file mode 100644 index 000000000..cc72cac20 --- /dev/null +++ b/hdf5-types/src/dyn_value.rs @@ -0,0 +1,1117 @@ +use std::fmt::{self, Debug, Display}; +use std::mem; +use std::ptr; +use std::slice; + +use crate::h5type::{hvl_t, CompoundType, EnumType, FloatSize, H5Type, IntSize, TypeDescriptor}; +use crate::string::{VarLenAscii, VarLenUnicode}; + +fn read_raw(buf: &[u8]) -> T { + debug_assert_eq!(mem::size_of::(), buf.len()); + unsafe { *(buf.as_ptr().cast::()) } +} + +fn write_raw(out: &mut [u8], value: T) { + debug_assert_eq!(mem::size_of::(), out.len()); + unsafe { + *(out.as_mut_ptr().cast()) = value; + } +} + +unsafe trait DynDrop { + fn dyn_drop(&mut self) {} +} + +unsafe trait DynClone { + fn dyn_clone(&mut self, out: &mut [u8]); +} + +#[derive(Copy, Clone, PartialEq, Eq)] +pub enum DynInteger { + Int8(i8), + Int16(i16), + Int32(i32), + Int64(i64), + UInt8(u8), + UInt16(u16), + UInt32(u32), + UInt64(u64), +} + +impl DynInteger { + pub(self) fn read(buf: &[u8], signed: bool, size: IntSize) -> Self { + match (signed, size) { + (true, IntSize::U1) => Self::Int8(read_raw(buf)), + (true, IntSize::U2) => Self::Int16(read_raw(buf)), + (true, IntSize::U4) => Self::Int32(read_raw(buf)), + (true, IntSize::U8) => Self::Int64(read_raw(buf)), + (false, IntSize::U1) => Self::UInt8(read_raw(buf)), + (false, IntSize::U2) => Self::UInt16(read_raw(buf)), + (false, IntSize::U4) => Self::UInt32(read_raw(buf)), + (false, IntSize::U8) => Self::UInt64(read_raw(buf)), + } + } + + pub(self) fn as_u64(self) -> u64 { + match self { + Self::Int8(x) => x as _, + Self::Int16(x) => x as _, + Self::Int32(x) => x as _, + Self::Int64(x) => x as _, + Self::UInt8(x) => x as _, + Self::UInt16(x) => x as _, + Self::UInt32(x) => x as _, + Self::UInt64(x) => x as _, + } + } +} + +unsafe impl DynClone for DynInteger { + fn dyn_clone(&mut self, out: &mut [u8]) { + match self { + Self::Int8(x) => write_raw(out, *x), + Self::Int16(x) => 
write_raw(out, *x), + Self::Int32(x) => write_raw(out, *x), + Self::Int64(x) => write_raw(out, *x), + Self::UInt8(x) => write_raw(out, *x), + Self::UInt16(x) => write_raw(out, *x), + Self::UInt32(x) => write_raw(out, *x), + Self::UInt64(x) => write_raw(out, *x), + } + } +} + +impl Debug for DynInteger { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Self::Int8(x) => Debug::fmt(&x, f), + Self::Int16(x) => Debug::fmt(&x, f), + Self::Int32(x) => Debug::fmt(&x, f), + Self::Int64(x) => Debug::fmt(&x, f), + Self::UInt8(x) => Debug::fmt(&x, f), + Self::UInt16(x) => Debug::fmt(&x, f), + Self::UInt32(x) => Debug::fmt(&x, f), + Self::UInt64(x) => Debug::fmt(&x, f), + } + } +} + +impl Display for DynInteger { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl From for DynScalar { + fn from(value: DynInteger) -> Self { + Self::Integer(value) + } +} + +impl From for DynValue<'_> { + fn from(value: DynInteger) -> Self { + DynScalar::Integer(value).into() + } +} + +#[derive(Copy, Clone, PartialEq)] +pub enum DynScalar { + Integer(DynInteger), + Float32(f32), + Float64(f64), + Boolean(bool), +} + +unsafe impl DynClone for DynScalar { + fn dyn_clone(&mut self, out: &mut [u8]) { + match self { + Self::Integer(x) => x.dyn_clone(out), + Self::Float32(x) => write_raw(out, *x), + Self::Float64(x) => write_raw(out, *x), + Self::Boolean(x) => write_raw(out, *x), + } + } +} + +impl Debug for DynScalar { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Integer(x) => Debug::fmt(&x, f), + Self::Float32(x) => Debug::fmt(&x, f), + Self::Float64(x) => Debug::fmt(&x, f), + Self::Boolean(x) => Debug::fmt(&x, f), + } + } +} + +impl Display for DynScalar { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl From for DynValue<'static> { + fn from(value: DynScalar) -> Self { + DynValue::Scalar(value) + } +} + +#[derive(Copy, Clone)] +pub struct DynEnum<'a> { + 
tp: &'a EnumType, + value: DynInteger, +} + +impl<'a> DynEnum<'a> { + pub fn new(tp: &'a EnumType, value: DynInteger) -> Self { + Self { tp, value } + } + + pub fn name(&self) -> Option<&str> { + let value = self.value.as_u64(); + for member in &self.tp.members { + if member.value == value { + return Some(&member.name); + } + } + None + } +} + +unsafe impl DynClone for DynEnum<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + self.value.dyn_clone(out); + } +} + +impl PartialEq for DynEnum<'_> { + fn eq(&self, other: &Self) -> bool { + self.value == other.value + } +} + +impl Eq for DynEnum<'_> {} + +impl Debug for DynEnum<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self.name() { + Some(name) => f.write_str(name), + None => Debug::fmt(&self.value, f), + } + } +} + +impl Display for DynEnum<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynEnum<'a>) -> Self { + DynValue::Enum(value) + } +} + +pub struct DynCompound<'a> { + tp: &'a CompoundType, + buf: &'a [u8], +} + +impl<'a> DynCompound<'a> { + pub fn new(tp: &'a CompoundType, buf: &'a [u8]) -> Self { + Self { tp, buf } + } + + pub fn iter(&self) -> impl Iterator { + self.tp.fields.iter().map(move |field| { + ( + field.name.as_ref(), + DynValue::new(&field.ty, &self.buf[field.offset..(field.offset + field.ty.size())]), + ) + }) + } +} + +unsafe impl DynDrop for DynCompound<'_> { + fn dyn_drop(&mut self) { + for (_, mut value) in self.iter() { + value.dyn_drop(); + } + } +} + +unsafe impl DynClone for DynCompound<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + debug_assert_eq!(out.len(), self.tp.size); + for (i, (_, mut value)) in self.iter().enumerate() { + let field = &self.tp.fields[i]; + value.dyn_clone(&mut out[field.offset..(field.offset + field.ty.size())]); + } + } +} + +impl PartialEq for DynCompound<'_> { + fn eq(&self, other: &Self) -> bool { + let (mut it1, mut it2) = 
(self.iter(), other.iter()); + loop { + match (it1.next(), it2.next()) { + (Some(v1), Some(v2)) => { + if v1 != v2 { + return false; + } + } + (None, None) => return true, + _ => return false, + } + } + } +} + +struct RawStr<'a>(&'a str); + +impl Debug for RawStr<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(self.0) + } +} + +impl Debug for DynCompound<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut b = f.debug_map(); + for (name, value) in self.iter() { + b.entry(&RawStr(name), &value); + } + b.finish() + } +} + +impl Display for DynCompound<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynCompound<'a>) -> Self { + DynValue::Compound(value) + } +} + +pub struct DynArray<'a> { + tp: &'a TypeDescriptor, + buf: &'a [u8], + len: Option, +} + +impl<'a> DynArray<'a> { + pub fn new(tp: &'a TypeDescriptor, buf: &'a [u8], len: Option) -> Self { + Self { tp, buf, len } + } + + fn get_ptr(&self) -> *const u8 { + match self.len { + Some(_) => self.buf.as_ptr(), + None => read_raw::(self.buf).ptr as *const u8, + } + } + + fn get_len(&self) -> usize { + match self.len { + Some(len) => len, + None => read_raw::(self.buf).len, + } + } + + pub fn iter(&self) -> impl Iterator { + let ptr = self.get_ptr(); + let len = self.get_len(); + let size = self.tp.size(); + let buf = if !ptr.is_null() && len != 0 { + unsafe { slice::from_raw_parts(ptr, len * size) } + } else { + [].as_ref() + }; + (0..len).map(move |i| DynValue::new(self.tp, &buf[(i * size)..((i + 1) * size)])) + } +} + +unsafe impl DynDrop for DynArray<'_> { + fn dyn_drop(&mut self) { + for mut value in self.iter() { + value.dyn_drop(); + } + if self.len.is_none() && !self.get_ptr().is_null() { + unsafe { + crate::free(self.get_ptr() as *mut _); + } + } + } +} + +unsafe impl DynClone for DynArray<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + let (len, ptr, size) = 
(self.get_len(), self.get_ptr(), self.tp.size()); + let out = if self.len.is_none() { + debug_assert_eq!(out.len(), mem::size_of::()); + if self.get_ptr().is_null() { + return; + } + unsafe { + let dst = crate::malloc(len * size).cast(); + ptr::copy_nonoverlapping(ptr, dst, len * size); + // Alignment is always at least usize for pointers from `hdf5-c` + let outptr = out.as_mut_ptr().cast::(); + ptr::write(ptr::addr_of_mut!((*outptr).ptr), dst.cast()); + slice::from_raw_parts_mut(dst, len * size) + } + } else { + out + }; + debug_assert_eq!(out.len(), len * size); + for (i, mut value) in self.iter().enumerate() { + value.dyn_clone(&mut out[(i * size)..((i + 1) * size)]); + } + } +} + +impl PartialEq for DynArray<'_> { + fn eq(&self, other: &Self) -> bool { + let (mut it1, mut it2) = (self.iter(), other.iter()); + loop { + match (it1.next(), it2.next()) { + (Some(v1), Some(v2)) => { + if v1 != v2 { + return false; + } + } + (None, None) => return true, + _ => return false, + } + } + } +} + +impl Debug for DynArray<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut b = f.debug_list(); + for value in self.iter() { + b.entry(&value); + } + b.finish() + } +} + +impl Display for DynArray<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynArray<'a>) -> Self { + DynValue::Array(value) + } +} + +pub struct DynFixedString<'a> { + buf: &'a [u8], + unicode: bool, +} + +impl<'a> DynFixedString<'a> { + pub fn new(buf: &'a [u8], unicode: bool) -> Self { + Self { buf, unicode } + } + + pub fn raw_len(&self) -> usize { + self.buf.iter().rev().skip_while(|&c| *c == 0).count() + } + + pub fn get_buf(&self) -> &[u8] { + &self.buf[..self.raw_len()] + } +} + +unsafe impl DynClone for DynFixedString<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + debug_assert_eq!(self.buf.len(), out.len()); + out.clone_from_slice(self.buf); + } +} + +impl PartialEq for 
DynFixedString<'_> { + fn eq(&self, other: &Self) -> bool { + self.unicode == other.unicode && self.get_buf() == other.get_buf() + } +} + +impl Eq for DynFixedString<'_> {} + +impl Debug for DynFixedString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let s = unsafe { std::str::from_utf8_unchecked(self.get_buf()) }; + Debug::fmt(&s, f) + } +} + +impl Display for DynFixedString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynString<'a> { + fn from(value: DynFixedString<'a>) -> Self { + DynString::Fixed(value) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynFixedString<'a>) -> Self { + DynString::Fixed(value).into() + } +} + +pub struct DynVarLenString<'a> { + buf: &'a [u8], + unicode: bool, +} + +impl<'a> DynVarLenString<'a> { + pub fn new(buf: &'a [u8], unicode: bool) -> Self { + Self { buf, unicode } + } + + fn get_ptr(&self) -> *const u8 { + if self.unicode { + self.as_unicode().as_ptr() + } else { + self.as_ascii().as_ptr() + } + } + + fn raw_len(&self) -> usize { + if self.unicode { + self.as_unicode().as_bytes().len() + } else { + self.as_ascii().as_bytes().len() + } + } + + fn as_ascii(&self) -> &VarLenAscii { + // Alignment is always at least usize for pointers from `hdf5-c` + unsafe { &*(self.buf.as_ptr().cast::()) } + } + + fn as_unicode(&self) -> &VarLenUnicode { + // Alignment is always at least usize for pointers from `hdf5-c` + unsafe { &*(self.buf.as_ptr().cast::()) } + } +} + +unsafe impl DynDrop for DynVarLenString<'_> { + fn dyn_drop(&mut self) { + if !self.get_ptr().is_null() { + unsafe { + crate::free(self.get_ptr() as *mut _); + } + } + } +} + +unsafe impl DynClone for DynVarLenString<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + debug_assert_eq!(out.len(), mem::size_of::()); + if !self.get_ptr().is_null() { + unsafe { + let raw_len = self.raw_len(); + let dst = crate::malloc(raw_len + 1).cast(); + 
ptr::copy_nonoverlapping(self.get_ptr(), dst, raw_len); + dst.add(raw_len).write(0); + // Alignment is always at least usize for pointers from `hdf5-c` + let outptr = out.as_mut_ptr().cast::<*const u8>(); + ptr::write(outptr, dst.cast()); + } + } + } +} + +impl PartialEq for DynVarLenString<'_> { + fn eq(&self, other: &Self) -> bool { + match (self.unicode, other.unicode) { + (true, true) => self.as_unicode() == other.as_unicode(), + (false, false) => self.as_ascii() == other.as_ascii(), + _ => false, + } + } +} + +impl Eq for DynVarLenString<'_> {} + +impl Debug for DynVarLenString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.unicode { + Debug::fmt(&self.as_unicode(), f) + } else { + Debug::fmt(&self.as_ascii(), f) + } + } +} + +impl Display for DynVarLenString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynString<'a> { + fn from(value: DynVarLenString<'a>) -> Self { + DynString::VarLen(value) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynVarLenString<'a>) -> Self { + DynString::VarLen(value).into() + } +} + +#[derive(PartialEq, Eq)] +pub enum DynString<'a> { + Fixed(DynFixedString<'a>), + VarLen(DynVarLenString<'a>), +} + +unsafe impl DynDrop for DynString<'_> { + fn dyn_drop(&mut self) { + if let DynString::VarLen(string) = self { + string.dyn_drop(); + } + } +} + +unsafe impl DynClone for DynString<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + match self { + Self::Fixed(x) => x.dyn_clone(out), + Self::VarLen(x) => x.dyn_clone(out), + } + } +} + +impl Debug for DynString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Fixed(x) => Debug::fmt(&x, f), + Self::VarLen(x) => Debug::fmt(&x, f), + } + } +} + +impl Display for DynString<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +impl<'a> From> for DynValue<'a> { + fn from(value: DynString<'a>) -> Self { + 
DynValue::String(value) + } +} + +#[derive(PartialEq)] +pub enum DynValue<'a> { + Scalar(DynScalar), + Enum(DynEnum<'a>), + Compound(DynCompound<'a>), + Array(DynArray<'a>), + String(DynString<'a>), +} + +impl<'a> DynValue<'a> { + pub fn new(tp: &'a TypeDescriptor, buf: &'a [u8]) -> Self { + use TypeDescriptor::*; + debug_assert_eq!(tp.size(), buf.len()); + + match tp { + Integer(size) | Unsigned(size) => DynInteger::read(buf, true, *size).into(), + Float(FloatSize::U4) => DynScalar::Float32(read_raw(buf)).into(), + Float(FloatSize::U8) => DynScalar::Float64(read_raw(buf)).into(), + Boolean => DynScalar::Boolean(read_raw(buf)).into(), + Enum(ref tp) => DynEnum::new(tp, DynInteger::read(buf, tp.signed, tp.size)).into(), + Compound(ref tp) => DynCompound::new(tp, buf).into(), + FixedArray(ref tp, n) => DynArray::new(tp, buf, Some(*n)).into(), + VarLenArray(ref tp) => DynArray::new(tp, buf, None).into(), + FixedAscii(_) => DynFixedString::new(buf, false).into(), + FixedUnicode(_) => DynFixedString::new(buf, true).into(), + VarLenAscii => DynVarLenString::new(buf, false).into(), + VarLenUnicode => DynVarLenString::new(buf, true).into(), + } + } +} + +unsafe impl DynDrop for DynValue<'_> { + fn dyn_drop(&mut self) { + match self { + Self::Compound(x) => x.dyn_drop(), + Self::Array(x) => x.dyn_drop(), + Self::String(x) => x.dyn_drop(), + _ => (), + } + } +} + +unsafe impl DynClone for DynValue<'_> { + fn dyn_clone(&mut self, out: &mut [u8]) { + match self { + Self::Scalar(x) => x.dyn_clone(out), + Self::Enum(x) => x.dyn_clone(out), + Self::Compound(x) => x.dyn_clone(out), + Self::Array(x) => x.dyn_clone(out), + Self::String(x) => x.dyn_clone(out), + } + } +} + +impl Debug for DynValue<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Scalar(x) => Debug::fmt(&x, f), + Self::Enum(x) => Debug::fmt(&x, f), + Self::Compound(x) => Debug::fmt(&x, f), + Self::Array(x) => Debug::fmt(&x, f), + Self::String(x) => Debug::fmt(&x, f), + } + } +} + 
+impl Display for DynValue<'_> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +pub struct OwnedDynValue { + tp: TypeDescriptor, + buf: Box<[u8]>, +} + +impl OwnedDynValue { + pub fn new(value: T) -> Self { + let ptr = (&value as *const T).cast::(); + let len = mem::size_of_val(&value); + let buf = unsafe { std::slice::from_raw_parts(ptr, len) }; + mem::forget(value); + Self { tp: T::type_descriptor(), buf: buf.to_owned().into_boxed_slice() } + } + + pub fn get(&self) -> DynValue { + DynValue::new(&self.tp, &self.buf) + } + + pub fn type_descriptor(&self) -> &TypeDescriptor { + &self.tp + } + + #[doc(hidden)] + pub unsafe fn get_buf(&self) -> &[u8] { + &self.buf + } + + #[doc(hidden)] + pub unsafe fn from_raw(tp: TypeDescriptor, buf: Box<[u8]>) -> Self { + Self { tp, buf } + } + + /// Cast to the concrete type + /// + /// Will fail if the type-descriptors are not equal + pub fn cast(mut self) -> Result { + use mem::MaybeUninit; + if self.tp != T::type_descriptor() { + return Err(self); + } + debug_assert_eq!(self.tp.size(), self.buf.len()); + let mut out = MaybeUninit::::uninit(); + unsafe { + ptr::copy_nonoverlapping( + self.buf.as_ptr(), + out.as_mut_ptr().cast::(), + self.buf.len(), + ); + } + // For safety we must ensure any nested structures are not live at the same time, + // as this could cause a double free in `dyn_drop`. 
+ // We must deallocate only the top level of Self + + // The zero-sized array has a special case to not drop ptr if len is zero, + // so `dyn_drop` of `DynArray` is a nop + self.tp = <[u8; 0]>::type_descriptor(); + // We must also swap out the buffer to ensure we can create the `DynValue` + let mut b: Box<[u8]> = Box::new([]); + mem::swap(&mut self.buf, &mut b); + + Ok(unsafe { out.assume_init() }) + } +} + +impl From for OwnedDynValue { + fn from(value: T) -> Self { + Self::new(value) + } +} + +impl Drop for OwnedDynValue { + fn drop(&mut self) { + self.get().dyn_drop(); + } +} + +impl Clone for OwnedDynValue { + fn clone(&self) -> Self { + let mut buf = self.buf.clone(); + self.get().dyn_clone(&mut buf); + Self { tp: self.tp.clone(), buf } + } +} + +impl PartialEq for OwnedDynValue { + fn eq(&self, other: &Self) -> bool { + self.get() == other.get() + } +} + +impl Debug for OwnedDynValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(&self.get(), f) + } +} + +impl Display for OwnedDynValue { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + Debug::fmt(self, f) + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use unindent::unindent; + + use crate::array::VarLenArray; + use crate::h5type::{TypeDescriptor as TD, *}; + use crate::string::{FixedAscii, FixedUnicode, VarLenAscii, VarLenUnicode}; + + use super::*; + + #[derive(Copy, Clone, Debug, PartialEq, Eq)] + #[repr(i16)] + enum Color { + Red = -10_000, + Green = 0, + Blue = 10_000, + } + + #[derive(Copy, Clone, Debug, PartialEq)] + #[repr(C)] + pub struct Point { + coords: [f32; 2], + color: Color, + nice: bool, + } + + #[derive(Clone, Debug, PartialEq)] + #[repr(C)] + struct Data { + points: VarLenArray, + fa: FixedAscii<5>, + fu: FixedUnicode<5>, + va: VarLenAscii, + vu: VarLenUnicode, + } + + #[derive(Clone, Debug, PartialEq)] + #[repr(C)] + struct BigStruct { + ints: (i8, i16, i32, i64), + uints: (u8, u16, u32, u64), + floats: (f32, f64), + data: Data, + } + + 
fn td_color() -> TD { + TD::Enum(EnumType { + size: IntSize::U2, + signed: true, + members: vec![ + EnumMember { name: "Red".into(), value: -10_000i16 as _ }, + EnumMember { name: "Green".into(), value: 0 }, + EnumMember { name: "Blue".into(), value: 10_000 }, + ], + }) + } + + fn td_point() -> TD { + let coords = TD::FixedArray(Box::new(TD::Float(FloatSize::U4)), 2); + TD::Compound(CompoundType { + fields: Vec::from( + [ + CompoundField::new("coords", coords, 0, 0), + CompoundField::new("color", td_color(), 8, 1), + CompoundField::new("nice", TD::Boolean, 10, 2), + ] + .as_ref(), + ), + size: 12, + }) + } + + fn td_data() -> TD { + let points = TD::VarLenArray(Box::new(td_point())); + TD::Compound(CompoundType { + fields: Vec::from( + [ + CompoundField::new("points", points, 0, 0), + CompoundField::new("fa", TD::FixedAscii(5), 16, 1), + CompoundField::new("fu", TD::FixedUnicode(5), 21, 2), + CompoundField::new("va", TD::VarLenAscii, 32, 3), + CompoundField::new("vu", TD::VarLenUnicode, 40, 4), + ] + .as_ref(), + ), + size: 48, + }) + } + + fn td_big_struct() -> TD { + let ints = TD::Compound(CompoundType { + fields: Vec::from( + [ + CompoundField::typed::("2", 0, 2), + CompoundField::typed::("1", 4, 1), + CompoundField::typed::("0", 6, 0), + CompoundField::typed::("3", 8, 3), + ] + .as_ref(), + ), + size: 16, + }); + let uints = TD::Compound(CompoundType { + fields: Vec::from( + [ + CompoundField::typed::("2", 0, 2), + CompoundField::typed::("1", 4, 1), + CompoundField::typed::("0", 6, 0), + CompoundField::typed::("3", 8, 3), + ] + .as_ref(), + ), + size: 16, + }); + let floats = TD::Compound(CompoundType { + fields: Vec::from( + [CompoundField::typed::("0", 0, 0), CompoundField::typed::("1", 8, 1)] + .as_ref(), + ), + size: 16, + }); + TD::Compound(CompoundType { + fields: Vec::from( + [ + CompoundField::new("ints", ints, 0, 0), + CompoundField::new("uints", uints, 16, 1), + CompoundField::new("floats", floats, 32, 2), + CompoundField::new("data", td_data(), 48, 
3), + ] + .as_ref(), + ), + size: 96, + }) + } + + fn big_struct_1() -> BigStruct { + BigStruct { + ints: (-10, 20, -30, 40), + uints: (30, 40, 50, 60), + floats: (-3.14, 2.71), + data: Data { + points: VarLenArray::from_slice( + [ + Point { coords: [-1.0, 2.0], color: Color::Red, nice: true }, + Point { coords: [0.1, 0.], color: Color::Green, nice: false }, + Point { coords: [10., 0.], color: Color::Blue, nice: true }, + ] + .as_ref(), + ), + fa: FixedAscii::from_ascii(b"12345").unwrap(), + fu: FixedUnicode::from_str("∀").unwrap(), + va: VarLenAscii::from_ascii(b"wat").unwrap(), + vu: VarLenUnicode::from_str("⨁∀").unwrap(), + }, + } + } + + fn big_struct_2() -> BigStruct { + BigStruct { + ints: (1, 2, 3, 4), + uints: (3, 4, 5, 6), + floats: (-1., 2.), + data: Data { + points: VarLenArray::from_slice([].as_ref()), + fa: FixedAscii::from_ascii(b"").unwrap(), + fu: FixedUnicode::from_str("").unwrap(), + va: VarLenAscii::from_ascii(b"").unwrap(), + vu: VarLenUnicode::from_str("").unwrap(), + }, + } + } + + unsafe impl crate::h5type::H5Type for BigStruct { + fn type_descriptor() -> TypeDescriptor { + td_big_struct() + } + } + + #[test] + fn test_dyn_value_from() { + assert_eq!(OwnedDynValue::from(-42i16), OwnedDynValue::new(-42i16)); + let s = big_struct_2(); + assert_eq!(OwnedDynValue::from(s.clone()), OwnedDynValue::new(s.clone())); + } + + #[test] + fn test_dyn_value_clone_drop() { + let val1 = OwnedDynValue::new(big_struct_1()); + let val2 = OwnedDynValue::new(big_struct_2()); + + assert_eq!(val1, val1); + assert_eq!(val1.clone(), val1); + assert_eq!(val1.clone(), val1.clone().clone()); + + assert_eq!(val2, val2); + assert_eq!(val2.clone(), val2); + assert_eq!(val2.clone(), val2.clone().clone()); + + assert_ne!(val1, val2); + assert_ne!(val2, val1); + } + + #[test] + fn test_dyn_value_display() { + let val1 = OwnedDynValue::new(big_struct_1()); + let val2 = OwnedDynValue::new(big_struct_2()); + + let val1_flat = unindent( + "\ + {ints: {2: -30, 1: 20, 0: -10, 3: 
40}, \ + uints: {2: 50, 1: 40, 0: 30, 3: 60}, \ + floats: {0: -3.14, 1: 2.71}, \ + data: {points: [{coords: [-1.0, 2.0], color: Red, nice: true}, \ + {coords: [0.1, 0.0], color: Green, nice: false}, \ + {coords: [10.0, 0.0], color: Blue, nice: true}], \ + fa: \"12345\", fu: \"∀\", va: \"wat\", vu: \"⨁∀\"}}", + ); + + let val1_nice = unindent( + r#" + { + ints: { + 2: -30, + 1: 20, + 0: -10, + 3: 40, + }, + uints: { + 2: 50, + 1: 40, + 0: 30, + 3: 60, + }, + floats: { + 0: -3.14, + 1: 2.71, + }, + data: { + points: [ + { + coords: [ + -1.0, + 2.0, + ], + color: Red, + nice: true, + }, + { + coords: [ + 0.1, + 0.0, + ], + color: Green, + nice: false, + }, + { + coords: [ + 10.0, + 0.0, + ], + color: Blue, + nice: true, + }, + ], + fa: "12345", + fu: "∀", + va: "wat", + vu: "⨁∀", + }, + }"#, + ); + + let val2_flat = unindent( + "\ + {ints: {2: 3, 1: 2, 0: 1, 3: 4}, \ + uints: {2: 5, 1: 4, 0: 3, 3: 6}, \ + floats: {0: -1.0, 1: 2.0}, \ + data: {points: [], fa: \"\", fu: \"\", va: \"\", vu: \"\"}}", + ); + + let val2_nice = unindent( + r#" + { + ints: { + 2: 3, + 1: 2, + 0: 1, + 3: 4, + }, + uints: { + 2: 5, + 1: 4, + 0: 3, + 3: 6, + }, + floats: { + 0: -1.0, + 1: 2.0, + }, + data: { + points: [], + fa: "", + fu: "", + va: "", + vu: "", + }, + }"#, + ); + + assert_eq!(format!("{}", val1), val1_flat); + assert_eq!(format!("{:?}", val1), val1_flat); + assert_eq!(format!("{:#?}", val1.clone()), val1_nice); + + assert_eq!(format!("{}", val2), val2_flat); + assert_eq!(format!("{:?}", val2), val2_flat); + assert_eq!(format!("{:#?}", val2.clone()), val2_nice); + } +} diff --git a/hdf5-types/src/h5type.rs b/hdf5-types/src/h5type.rs index 051e07986..7f50dc12a 100644 --- a/hdf5-types/src/h5type.rs +++ b/hdf5-types/src/h5type.rs @@ -3,14 +3,15 @@ use std::mem; use std::os::raw::c_void; use std::ptr; -use crate::array::{Array, VarLenArray}; +use crate::array::VarLenArray; use crate::string::{FixedAscii, FixedUnicode, VarLenAscii, VarLenUnicode}; #[allow(non_camel_case_types)] 
#[repr(C)] -struct hvl_t { - len: usize, - p: *mut c_void, +#[derive(Copy, Clone)] +pub(crate) struct hvl_t { + pub len: usize, + pub ptr: *mut c_void, } #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] @@ -22,15 +23,15 @@ pub enum IntSize { } impl IntSize { - pub fn from_int(size: usize) -> Option { + pub const fn from_int(size: usize) -> Option { if size == 1 { - Some(IntSize::U1) + Some(Self::U1) } else if size == 2 { - Some(IntSize::U2) + Some(Self::U2) } else if size == 4 { - Some(IntSize::U4) + Some(Self::U4) } else if size == 8 { - Some(IntSize::U8) + Some(Self::U8) } else { None } @@ -44,11 +45,11 @@ pub enum FloatSize { } impl FloatSize { - pub fn from_int(size: usize) -> Option { + pub const fn from_int(size: usize) -> Option { if size == 4 { - Some(FloatSize::U4) + Some(Self::U4) } else if size == 8 { - Some(FloatSize::U8) + Some(Self::U8) } else { None } @@ -104,12 +105,12 @@ pub struct CompoundType { } impl CompoundType { - pub fn to_c_repr(&self) -> CompoundType { + pub fn to_c_repr(&self) -> Self { let mut layout = self.clone(); layout.fields.sort_by_key(|f| f.index); let mut offset = 0; let mut max_align = 1; - for f in layout.fields.iter_mut() { + for f in &mut layout.fields { f.ty = f.ty.to_c_repr(); let align = f.ty.c_alignment(); while offset % align != 0 { @@ -126,11 +127,11 @@ impl CompoundType { layout } - pub fn to_packed_repr(&self) -> CompoundType { + pub fn to_packed_repr(&self) -> Self { let mut layout = self.clone(); layout.fields.sort_by_key(|f| f.index); layout.size = 0; - for f in layout.fields.iter_mut() { + for f in &mut layout.fields { f.ty = f.ty.to_packed_repr(); f.offset = layout.size; layout.size += f.ty.size(); @@ -147,10 +148,10 @@ pub enum TypeDescriptor { Boolean, Enum(EnumType), Compound(CompoundType), - FixedArray(Box, usize), + FixedArray(Box, usize), FixedAscii(usize), FixedUnicode(usize), - VarLenArray(Box), + VarLenArray(Box), VarLenAscii, VarLenUnicode, } @@ -183,53 +184,45 @@ impl Display for 
TypeDescriptor { impl TypeDescriptor { pub fn size(&self) -> usize { - use self::TypeDescriptor::*; - match *self { - Integer(size) | Unsigned(size) => size as _, - Float(size) => size as _, - Boolean => 1, - Enum(ref enum_type) => enum_type.size as _, - Compound(ref compound) => compound.size, - FixedArray(ref ty, len) => ty.size() * len, - FixedAscii(len) | FixedUnicode(len) => len, - VarLenArray(_) => mem::size_of::(), - VarLenAscii | VarLenUnicode => mem::size_of::<*const u8>(), + Self::Integer(size) | Self::Unsigned(size) => size as _, + Self::Float(size) => size as _, + Self::Boolean => 1, + Self::Enum(ref enum_type) => enum_type.size as _, + Self::Compound(ref compound) => compound.size, + Self::FixedArray(ref ty, len) => ty.size() * len, + Self::FixedAscii(len) | Self::FixedUnicode(len) => len, + Self::VarLenArray(_) => mem::size_of::(), + Self::VarLenAscii | Self::VarLenUnicode => mem::size_of::<*const u8>(), } } fn c_alignment(&self) -> usize { - use self::TypeDescriptor::*; - match *self { - Compound(ref compound) => { + Self::Compound(ref compound) => { compound.fields.iter().map(|f| f.ty.c_alignment()).max().unwrap_or(1) } - FixedArray(ref ty, _) => ty.c_alignment(), - FixedAscii(_) | FixedUnicode(_) => 1, - VarLenArray(_) => mem::size_of::(), + Self::FixedArray(ref ty, _) => ty.c_alignment(), + Self::FixedAscii(_) | Self::FixedUnicode(_) => 1, + Self::VarLenArray(_) => mem::size_of::(), _ => self.size(), } } pub fn to_c_repr(&self) -> Self { - use self::TypeDescriptor::*; - match *self { - Compound(ref compound) => Compound(compound.to_c_repr()), - FixedArray(ref ty, size) => FixedArray(Box::new(ty.to_c_repr()), size), - VarLenArray(ref ty) => VarLenArray(Box::new(ty.to_c_repr())), + Self::Compound(ref compound) => Self::Compound(compound.to_c_repr()), + Self::FixedArray(ref ty, size) => Self::FixedArray(Box::new(ty.to_c_repr()), size), + Self::VarLenArray(ref ty) => Self::VarLenArray(Box::new(ty.to_c_repr())), _ => self.clone(), } } pub fn 
to_packed_repr(&self) -> Self { - use self::TypeDescriptor::*; - match *self { - Compound(ref compound) => Compound(compound.to_packed_repr()), - FixedArray(ref ty, size) => FixedArray(Box::new(ty.to_packed_repr()), size), - VarLenArray(ref ty) => VarLenArray(Box::new(ty.to_packed_repr())), + Self::Compound(ref compound) => Self::Compound(compound.to_packed_repr()), + Self::FixedArray(ref ty, size) => Self::FixedArray(Box::new(ty.to_packed_repr()), size), + Self::VarLenArray(ref ty) => Self::VarLenArray(Box::new(ty.to_packed_repr())), _ => self.clone(), } } @@ -330,13 +323,10 @@ macro_rules! impl_tuple { impl_tuple! { A, B, C, D, E, F, G, H, I, J, K, L } -unsafe impl, I: H5Type> H5Type for T { +unsafe impl H5Type for [T; N] { #[inline] fn type_descriptor() -> TypeDescriptor { - TypeDescriptor::FixedArray( - Box::new(::type_descriptor()), - ::capacity(), - ) + TypeDescriptor::FixedArray(Box::new(::type_descriptor()), N) } } @@ -347,17 +337,17 @@ unsafe impl H5Type for VarLenArray { } } -unsafe impl> H5Type for FixedAscii { +unsafe impl H5Type for FixedAscii { #[inline] fn type_descriptor() -> TypeDescriptor { - TypeDescriptor::FixedAscii(A::capacity()) + TypeDescriptor::FixedAscii(N) } } -unsafe impl> H5Type for FixedUnicode { +unsafe impl H5Type for FixedUnicode { #[inline] fn type_descriptor() -> TypeDescriptor { - TypeDescriptor::FixedUnicode(A::capacity()) + TypeDescriptor::FixedUnicode(N) } } @@ -438,8 +428,8 @@ pub mod tests { #[test] pub fn test_string_types() { - type FA = FixedAscii<[u8; 16]>; - type FU = FixedUnicode<[u8; 32]>; + type FA = FixedAscii<16>; + type FU = FixedUnicode<32>; assert_eq!(FA::type_descriptor(), TD::FixedAscii(16)); assert_eq!(FU::type_descriptor(), TD::FixedUnicode(32)); assert_eq!(VarLenAscii::type_descriptor(), TD::VarLenAscii); diff --git a/hdf5-types/src/lib.rs b/hdf5-types/src/lib.rs index d76172dc8..7e0532282 100644 --- a/hdf5-types/src/lib.rs +++ b/hdf5-types/src/lib.rs @@ -1,16 +1,60 @@ #![recursion_limit = "1024"] 
#![cfg_attr(feature = "cargo-clippy", allow(clippy::missing_safety_doc))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::missing_const_for_fn))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::redundant_pub_crate))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::must_use_candidate))] + +//! Types that can be stored and retrieved from a `HDF5` dataset +//! +//! Crate features: +//! * `h5-alloc`: Use the `hdf5` allocator for varlen types and dynamic values. +//! This is necessary on platforms which uses different allocators +//! in different libraries (e.g. dynamic libraries on windows), +//! or if `hdf5-c` is compiled with the MEMCHECKER option. +//! This option is forced on in the case of using a `windows` DLL. #[cfg(test)] #[macro_use] extern crate quickcheck; mod array; +pub mod dyn_value; mod h5type; mod string; -pub use self::array::{Array, VarLenArray}; +pub use self::array::VarLenArray; +pub use self::dyn_value::{DynValue, OwnedDynValue}; pub use self::h5type::{ CompoundField, CompoundType, EnumMember, EnumType, FloatSize, H5Type, IntSize, TypeDescriptor, }; pub use self::string::{FixedAscii, FixedUnicode, StringError, VarLenAscii, VarLenUnicode}; + +pub(crate) unsafe fn malloc(n: usize) -> *mut core::ffi::c_void { + cfg_if::cfg_if! { + if #[cfg(any(feature = "h5-alloc", windows_dll))] { + hdf5_sys::h5::H5allocate_memory(n, 0) + } else { + libc::malloc(n) + } + } +} + +pub(crate) unsafe fn free(ptr: *mut core::ffi::c_void) { + cfg_if::cfg_if! { + if #[cfg(any(feature = "h5-alloc", windows_dll))] { + hdf5_sys::h5::H5free_memory(ptr); + } else { + libc::free(ptr); + } + } +} + +pub const USING_H5_ALLOCATOR: bool = { + cfg_if::cfg_if! 
{ + if #[cfg(any(feature = "h5-alloc", windows_dll))] { + true + } else { + false + } + } +}; diff --git a/hdf5-types/src/string.rs b/hdf5-types/src/string.rs index e006e2e80..5f587f500 100644 --- a/hdf5-types/src/string.rs +++ b/hdf5-types/src/string.rs @@ -1,3 +1,4 @@ +#![allow(clippy::redundant_slicing)] use std::borrow::{Borrow, Cow}; use std::error::Error as StdError; use std::fmt; @@ -10,8 +11,6 @@ use std::str::{self, FromStr}; use ascii::{AsAsciiStr, AsAsciiStrError, AsciiStr}; -use crate::array::Array; - #[derive(Clone, Copy, PartialEq, Eq, Debug)] #[non_exhaustive] pub enum StringError { @@ -22,7 +21,7 @@ pub enum StringError { impl From for StringError { fn from(err: AsAsciiStrError) -> Self { - StringError::AsciiError(err) + Self::AsciiError(err) } } @@ -45,15 +44,15 @@ impl fmt::Display for StringError { // ================================================================================ macro_rules! impl_string_eq { - ($lhs:ty, $rhs:ty $(,$t:ident: $b:ident<$a:ident=$v:ty>)*) => { - impl<'a $(,$t: $b<$a=$v>)*> PartialEq<$rhs> for $lhs { + ($lhs:ty, $rhs:ty $(,const $N:ident: usize)*) => { + impl<'a $(,const $N: usize)*> PartialEq<$rhs> for $lhs { #[inline] fn eq(&self, other: &$rhs) -> bool { PartialEq::eq(&self[..], &other[..]) } } - impl<'a $(,$t: $b<$a=$v>)*> PartialEq<$lhs> for $rhs { + impl<'a $(,const $N: usize)*> PartialEq<$lhs> for $rhs { #[inline] fn eq(&self, other: &$lhs) -> bool { PartialEq::eq(&self[..], &other[..]) @@ -63,36 +62,36 @@ macro_rules! impl_string_eq { } macro_rules! 
impl_string_traits { - ($nm:ident, $ty:ty $(,$t:ident: $b:ident<$a:ident=$v:ty>)*) => ( - impl<'a $(,$t: $b<$a=$v>)*> fmt::Debug for $ty { + ($ty:ty $(, const $N:ident: usize)*) => ( + impl<'a $(,const $N: usize)*> fmt::Debug for $ty { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.as_str().fmt(f) } } - impl<'a $(,$t: $b<$a=$v>)*> fmt::Display for $ty { + impl<'a $(,const $N: usize)*> fmt::Display for $ty { #[inline] fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.as_str().fmt(f) } } - impl<'a $(,$t: $b<$a=$v>)*> Hash for $ty { + impl<'a $(,const $N: usize)*> Hash for $ty { #[inline] fn hash(&self, hasher: &mut H) { Hash::hash(&self.as_bytes(), hasher) } } - impl<'a $(,$t: $b<$a=$v>)*> Default for $ty { + impl<'a $(,const $N: usize)*> Default for $ty { #[inline] - fn default() -> $ty { - $nm::new() + fn default() -> Self { + Self::new() } } - impl<'a $(,$t: $b<$a=$v>)*> Deref for $ty { + impl<'a $(,const $N: usize)*> Deref for $ty { type Target = str; #[inline] @@ -101,28 +100,28 @@ macro_rules! impl_string_traits { } } - impl<'a $(,$t: $b<$a=$v>)*> Borrow for $ty { + impl<'a $(,const $N: usize)*> Borrow for $ty { #[inline] fn borrow(&self) -> &str { self } } - impl<'a $(,$t: $b<$a=$v>)*> AsRef for $ty { + impl<'a $(,const $N: usize)*> AsRef for $ty { #[inline] fn as_ref(&self) -> &str { self } } - impl<'a $(,$t: $b<$a=$v>)*> AsRef<[u8]> for $ty { + impl<'a $(,const $N: usize)*> AsRef<[u8]> for $ty { #[inline] fn as_ref(&self) -> &[u8] { self.as_bytes() } } - impl<'a $(,$t: $b<$a=$v>)*> Index for $ty { + impl<'a $(,const $N: usize)*> Index for $ty { type Output = str; #[inline] @@ -131,42 +130,42 @@ macro_rules! 
impl_string_traits { } } - impl<'a $(,$t: $b<$a=$v>)*> PartialEq for $ty { + impl<'a $(,const $N: usize)*> PartialEq for $ty { #[inline] fn eq(&self, other: &Self) -> bool { PartialEq::eq(&self[..], &other[..]) } } - impl<'a $(,$t: $b<$a=$v>)*> Eq for $ty { } + impl<'a $(,const $N: usize)*> Eq for $ty { } - impl_string_eq!($ty, str $(,$t: $b<$a=$v>)*); - impl_string_eq!($ty, &'a str $(,$t: $b<$a=$v>)*); - impl_string_eq!($ty, String $(,$t: $b<$a=$v>)*); - impl_string_eq!($ty, Cow<'a, str> $(,$t: $b<$a=$v>)*); + impl_string_eq!($ty, str $(,const $N: usize)*); + impl_string_eq!($ty, &'a str $(,const $N: usize)*); + impl_string_eq!($ty, String $(,const $N: usize)*); + impl_string_eq!($ty, Cow<'a, str> $(,const $N: usize)*); - impl<'a $(,$t: $b<$a=$v>)*> From<$ty> for String { + impl<'a $(,const $N: usize)*> From<$ty> for String { #[inline] fn from(s: $ty) -> String { s.as_str().to_owned() } } - impl<'a $(,$t: $b<$a=$v>)*> From<&'a $ty> for &'a [u8] { + impl<'a $(,const $N: usize)*> From<&'a $ty> for &'a [u8] { #[inline] fn from(s: &$ty) -> &[u8] { s.as_bytes() } } - impl<'a $(,$t: $b<$a=$v>)*> From<&'a $ty> for &'a str { + impl<'a $(,const $N: usize)*> From<&'a $ty> for &'a str { #[inline] fn from(s: &$ty) -> &str { s.as_str() } } - impl<'a $(,$t: $b<$a=$v>)*> From<$ty> for Vec { + impl<'a $(,const $N: usize)*> From<$ty> for Vec { #[inline] fn from(s: $ty) -> Vec { s.as_bytes().to_vec() @@ -175,10 +174,10 @@ macro_rules! 
impl_string_traits { ) } -impl_string_traits!(FixedAscii, FixedAscii, A: Array); -impl_string_traits!(FixedUnicode, FixedUnicode, A: Array); -impl_string_traits!(VarLenAscii, VarLenAscii); -impl_string_traits!(VarLenUnicode, VarLenUnicode); +impl_string_traits!(FixedAscii, const N: usize); +impl_string_traits!(FixedUnicode, const N: usize); +impl_string_traits!(VarLenAscii); +impl_string_traits!(VarLenUnicode); // ================================================================================ @@ -191,7 +190,7 @@ impl Drop for VarLenAscii { #[inline] fn drop(&mut self) { if !self.ptr.is_null() { - unsafe { libc::free(self.ptr as *mut _) }; + unsafe { crate::free(self.ptr.cast()) }; } } } @@ -207,18 +206,18 @@ impl VarLenAscii { #[inline] pub fn new() -> Self { unsafe { - let ptr = libc::malloc(1) as *mut _; + let ptr = crate::malloc(1).cast(); *ptr = 0; - VarLenAscii { ptr } + Self { ptr } } } #[inline] unsafe fn from_bytes(bytes: &[u8]) -> Self { - let ptr = libc::malloc(bytes.len() + 1) as *mut _; + let ptr = crate::malloc(bytes.len() + 1).cast(); ptr::copy_nonoverlapping(bytes.as_ptr(), ptr, bytes.len()); *ptr.add(bytes.len()) = 0; - VarLenAscii { ptr } + Self { ptr } } #[inline] @@ -294,7 +293,7 @@ impl Drop for VarLenUnicode { #[inline] fn drop(&mut self) { if !self.ptr.is_null() { - unsafe { libc::free(self.ptr as *mut _) }; + unsafe { crate::free(self.ptr.cast()) }; } } } @@ -310,18 +309,18 @@ impl VarLenUnicode { #[inline] pub fn new() -> Self { unsafe { - let ptr = libc::malloc(1) as *mut _; + let ptr = crate::malloc(1).cast(); *ptr = 0; - VarLenUnicode { ptr } + Self { ptr } } } #[inline] unsafe fn from_bytes(bytes: &[u8]) -> Self { - let ptr = libc::malloc(bytes.len() + 1) as *mut _; + let ptr = crate::malloc(bytes.len() + 1).cast(); ptr::copy_nonoverlapping(bytes.as_ptr(), ptr, bytes.len()); *ptr.add(bytes.len()) = 0; - VarLenUnicode { ptr } + Self { ptr } } #[inline] @@ -375,44 +374,33 @@ impl FromStr for VarLenUnicode { // 
================================================================================ #[repr(C)] -#[derive(Copy)] -pub struct FixedAscii> { - buf: A, -} - -impl> Clone for FixedAscii { - #[inline] - fn clone(&self) -> Self { - unsafe { - let mut buf = mem::MaybeUninit::::uninit(); - ptr::copy_nonoverlapping(self.buf.as_ptr(), buf.as_mut_ptr() as *mut _, A::capacity()); - FixedAscii { buf: buf.assume_init() } - } - } +#[derive(Copy, Clone)] +pub struct FixedAscii { + buf: [u8; N], } -impl> FixedAscii { +impl FixedAscii { #[inline] pub fn new() -> Self { - unsafe { FixedAscii { buf: mem::zeroed() } } + unsafe { Self { buf: mem::zeroed() } } } #[inline] unsafe fn from_bytes(bytes: &[u8]) -> Self { - let len = if bytes.len() < A::capacity() { bytes.len() } else { A::capacity() }; - let mut buf: A = mem::zeroed(); - ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr() as *mut _, len); - FixedAscii { buf } + let len = if bytes.len() < N { bytes.len() } else { N }; + let mut buf: [u8; N] = mem::zeroed(); + ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr().cast(), len); + Self { buf } } #[inline] fn as_raw_slice(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.buf.as_ptr(), A::capacity()) } + unsafe { slice::from_raw_parts(self.buf.as_ptr(), N) } } #[inline] - pub fn capacity() -> usize { - A::capacity() + pub const fn capacity() -> usize { + N } #[inline] @@ -447,7 +435,7 @@ impl> FixedAscii { pub fn from_ascii>(bytes: &B) -> Result { let bytes = bytes.as_ref(); - if bytes.len() > A::capacity() { + if bytes.len() > N { return Err(StringError::InsufficientCapacity); } let s = AsciiStr::from_ascii(bytes)?; @@ -455,7 +443,7 @@ impl> FixedAscii { } } -impl> AsAsciiStr for FixedAscii { +impl AsAsciiStr for FixedAscii { type Inner = u8; #[inline] @@ -480,39 +468,28 @@ impl> AsAsciiStr for FixedAscii { // ================================================================================ #[repr(C)] -#[derive(Copy)] -pub struct FixedUnicode> { - buf: A, -} - -impl> 
Clone for FixedUnicode { - #[inline] - fn clone(&self) -> Self { - unsafe { - let mut buf = mem::MaybeUninit::::uninit(); - ptr::copy_nonoverlapping(self.buf.as_ptr(), buf.as_mut_ptr() as *mut _, A::capacity()); - FixedUnicode { buf: buf.assume_init() } - } - } +#[derive(Copy, Clone)] +pub struct FixedUnicode { + buf: [u8; N], } -impl> FixedUnicode { +impl FixedUnicode { #[inline] pub fn new() -> Self { - unsafe { FixedUnicode { buf: mem::zeroed() } } + unsafe { Self { buf: mem::zeroed() } } } #[inline] unsafe fn from_bytes(bytes: &[u8]) -> Self { - let len = if bytes.len() < A::capacity() { bytes.len() } else { A::capacity() }; - let mut buf: A = mem::zeroed(); - ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr() as *mut _, len); - FixedUnicode { buf } + let len = if bytes.len() < N { bytes.len() } else { N }; + let mut buf: [u8; N] = mem::zeroed(); + ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr().cast(), len); + Self { buf } } #[inline] fn as_raw_slice(&self) -> &[u8] { - unsafe { slice::from_raw_parts(self.buf.as_ptr(), A::capacity()) } + unsafe { slice::from_raw_parts(self.buf.as_ptr(), N) } } #[inline] @@ -521,8 +498,8 @@ impl> FixedUnicode { } #[inline] - pub fn capacity() -> usize { - A::capacity() + pub const fn capacity() -> usize { + N } #[inline] @@ -556,14 +533,11 @@ impl> FixedUnicode { } } -impl FromStr for FixedUnicode -where - A: Array, -{ +impl FromStr for FixedUnicode { type Err = StringError; fn from_str(s: &str) -> Result::Err> { - if s.as_bytes().len() <= A::capacity() { + if s.as_bytes().len() <= N { unsafe { Ok(Self::from_bytes(s.as_bytes())) } } else { Err(StringError::InsufficientCapacity) @@ -587,8 +561,8 @@ pub mod tests { type VA = VarLenAscii; type VU = VarLenUnicode; - type FA = FixedAscii<[u8; 1024]>; - type FU = FixedUnicode<[u8; 1024]>; + type FA = FixedAscii<1024>; + type FU = FixedUnicode<1024>; #[derive(Clone, Debug)] pub struct AsciiGen(pub Vec); @@ -597,7 +571,7 @@ pub mod tests { pub struct UnicodeGen(pub 
String); impl Arbitrary for AsciiGen { - fn arbitrary(g: &mut G) -> Self { + fn arbitrary(g: &mut Gen) -> Self { let mut bytes: Vec = Arbitrary::arbitrary(g); for c in &mut bytes { *c = *c % 0x7e + 1; @@ -620,7 +594,7 @@ pub mod tests { } impl Arbitrary for UnicodeGen { - fn arbitrary(g: &mut G) -> Self { + fn arbitrary(g: &mut Gen) -> Self { let s: String = Arbitrary::arbitrary(g); let mut s: String = s.chars().filter(|&c| c != '\0').collect(); while s.as_bytes().len() > 1024 { @@ -649,8 +623,8 @@ pub mod tests { #[test] pub fn test_capacity() { - type A = FixedAscii<[u8; 2]>; - type U = FixedUnicode<[u8; 2]>; + type A = FixedAscii<2>; + type U = FixedUnicode<2>; assert_eq!(A::from_ascii("ab").unwrap().as_str(), "ab"); assert!(A::from_ascii("abc").is_err()); assert_eq!(U::from_str("ab").unwrap().as_str(), "ab"); @@ -669,8 +643,8 @@ pub mod tests { #[test] pub fn test_null_padding() { - type A = FixedAscii<[u8; 3]>; - type U = FixedUnicode<[u8; 3]>; + type A = FixedAscii<3>; + type U = FixedUnicode<3>; assert_eq!(A::from_ascii("a\0b").unwrap().as_str(), "a\0b"); assert_eq!(A::from_ascii("a\0\0").unwrap().as_str(), "a"); assert!(A::from_ascii("\0\0\0").unwrap().is_empty()); diff --git a/licenses/hdf5.txt b/licenses/hdf5.txt deleted file mode 100644 index 6ac33ce8b..000000000 --- a/licenses/hdf5.txt +++ /dev/null @@ -1,107 +0,0 @@ -Copyright Notice and License Terms for -HDF5 (Hierarchical Data Format 5) Software Library and Utilities ------------------------------------------------------------------------------ - -HDF5 (Hierarchical Data Format 5) Software Library and Utilities -Copyright 2006 by The HDF Group. - -NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities -Copyright 1998-2006 by The Board of Trustees of the University of Illinois. - -All rights reserved. 
- -Redistribution and use in source and binary forms, with or without -modification, are permitted for any purpose (including commercial purposes) -provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, - this list of conditions, and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions, and the following disclaimer in the documentation - and/or materials provided with the distribution. - -3. Neither the name of The HDF Group, the name of the University, nor the - name of any Contributor may be used to endorse or promote products derived - from this software without specific prior written permission from - The HDF Group, the University, or the Contributor, respectively. - -DISCLAIMER: -THIS SOFTWARE IS PROVIDED BY THE HDF GROUP AND THE CONTRIBUTORS -"AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED. IN NO -EVENT SHALL THE HDF GROUP OR THE CONTRIBUTORS BE LIABLE FOR ANY DAMAGES -SUFFERED BY THE USERS ARISING OUT OF THE USE OF THIS SOFTWARE, EVEN IF -ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -You are under no obligation whatsoever to provide any bug fixes, patches, or -upgrades to the features, functionality or performance of the source code -("Enhancements") to anyone; however, if you choose to make your Enhancements -available either publicly, or directly to The HDF Group, without imposing a -separate written license agreement for such Enhancements, then you hereby -grant the following license: a non-exclusive, royalty-free perpetual license -to install, use, modify, prepare derivative works, incorporate into other -computer software, distribute, and sublicense such enhancements or derivative -works thereof, in binary and source code form. 
- ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ - -Limited portions of HDF5 were developed by Lawrence Berkeley National -Laboratory (LBNL). LBNL's Copyright Notice and Licensing Terms can be -found here: COPYING_LBNL_HDF5 file in this directory or at -http://support.hdfgroup.org/ftp/HDF5/releases/COPYING_LBNL_HDF5. - ------------------------------------------------------------------------------ ------------------------------------------------------------------------------ - -Contributors: National Center for Supercomputing Applications (NCSA) at -the University of Illinois, Fortner Software, Unidata Program Center -(netCDF), The Independent JPEG Group (JPEG), Jean-loup Gailly and Mark Adler -(gzip), and Digital Equipment Corporation (DEC). - ------------------------------------------------------------------------------ - -Portions of HDF5 were developed with support from the Lawrence Berkeley -National Laboratory (LBNL) and the United States Department of Energy -under Prime Contract No. DE-AC02-05CH11231. - ------------------------------------------------------------------------------ - -Portions of HDF5 were developed with support from the University of -California, Lawrence Livermore National Laboratory (UC LLNL). -The following statement applies to those portions of the product and must -be retained in any redistribution of source code, binaries, documentation, -and/or accompanying materials: - - This work was partially produced at the University of California, - Lawrence Livermore National Laboratory (UC LLNL) under contract - no. W-7405-ENG-48 (Contract 48) between the U.S. Department of Energy - (DOE) and The Regents of the University of California (University) - for the operation of UC LLNL. - - DISCLAIMER: - THIS WORK WAS PREPARED AS AN ACCOUNT OF WORK SPONSORED BY AN AGENCY OF - THE UNITED STATES GOVERNMENT. 
NEITHER THE UNITED STATES GOVERNMENT NOR - THE UNIVERSITY OF CALIFORNIA NOR ANY OF THEIR EMPLOYEES, MAKES ANY - WARRANTY, EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY OR RESPONSIBILITY - FOR THE ACCURACY, COMPLETENESS, OR USEFULNESS OF ANY INFORMATION, - APPARATUS, PRODUCT, OR PROCESS DISCLOSED, OR REPRESENTS THAT ITS USE - WOULD NOT INFRINGE PRIVATELY- OWNED RIGHTS. REFERENCE HEREIN TO ANY - SPECIFIC COMMERCIAL PRODUCTS, PROCESS, OR SERVICE BY TRADE NAME, - TRADEMARK, MANUFACTURER, OR OTHERWISE, DOES NOT NECESSARILY CONSTITUTE - OR IMPLY ITS ENDORSEMENT, RECOMMENDATION, OR FAVORING BY THE UNITED - STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA. THE VIEWS AND - OPINIONS OF AUTHORS EXPRESSED HEREIN DO NOT NECESSARILY STATE OR REFLECT - THOSE OF THE UNITED STATES GOVERNMENT OR THE UNIVERSITY OF CALIFORNIA, - AND SHALL NOT BE USED FOR ADVERTISING OR PRODUCT ENDORSEMENT PURPOSES. - ------------------------------------------------------------------------------ - -HDF5 is available with the SZIP compression library but SZIP is not part -of HDF5 and has separate copyright and license terms. See SZIP Compression -in HDF Products (www.hdfgroup.org/doc_resource/SZIP/) for further details. 
- ------------------------------------------------------------------------------ - - - diff --git a/src/class.rs b/src/class.rs index 274f70ef2..abcd7cb60 100644 --- a/src/class.rs +++ b/src/class.rs @@ -24,8 +24,8 @@ pub trait ObjectClass: Sized { fn from_id(id: hid_t) -> Result { h5lock!({ - if Self::is_valid_id_type(get_id_type(id)) { - let handle = Handle::try_new(id)?; + let handle = Handle::try_new(id)?; + if Self::is_valid_id_type(handle.id_type()) { let obj = Self::from_handle(handle); obj.validate().map(|_| obj) } else { @@ -43,24 +43,34 @@ pub trait ObjectClass: Sized { } unsafe fn transmute(&self) -> &T { - &*(self as *const Self as *const T) + &*(self as *const Self).cast::() } unsafe fn transmute_mut(&mut self) -> &mut T { - &mut *(self as *mut Self as *mut T) + &mut *(self as *mut Self).cast::() } - unsafe fn cast(self) -> T { + unsafe fn cast_unchecked(self) -> T { // This method requires you to be 18 years or older to use it - let obj = ptr::read(&self as *const _ as *const _); + // (note: if it wasn't a trait method, it could be marked as const) + let obj = ptr::read((&self as *const Self).cast()); mem::forget(self); obj } + fn cast(self) -> Result { + let id_type = self.handle().id_type(); + if Self::is_valid_id_type(id_type) { + Ok(unsafe { self.cast_unchecked() }) + } else { + Err(format!("unable to cast {} ({:?}) into {}", Self::NAME, id_type, T::NAME).into()) + } + } + fn debug_fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { // TODO: this can moved out if/when specialization lands in stable h5lock!({ - if !is_valid_user_id(self.handle().id()) { + if !self.handle().is_valid_user_id() { write!(f, "", Self::NAME) } else if let Some(d) = self.short_repr() { write!(f, "", Self::NAME, d) diff --git a/src/dim.rs b/src/dim.rs index 65e944fa1..d7b49d037 100644 --- a/src/dim.rs +++ b/src/dim.rs @@ -19,7 +19,7 @@ pub trait Dimension { } } -impl<'a, T: Dimension> Dimension for &'a T { +impl<'a, T: Dimension + ?Sized> Dimension for &'a T { fn 
ndim(&self) -> usize { Dimension::ndim(*self) } @@ -96,3 +96,13 @@ impl Dimension for Ix { vec![*self] } } + +#[cfg(test)] +pub mod tests { + // compile-time test + #[allow(dead_code)] + pub fn slice_as_shape(shape: &[usize]) { + let file = crate::File::create("foo.h5").unwrap(); + file.new_dataset::().shape(shape).create("Test").unwrap(); + } +} diff --git a/src/error.rs b/src/error.rs index 6fae25a59..a8e7778e7 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,142 +1,78 @@ -use std::cell::RefCell; +use std::convert::Infallible; use std::error::Error as StdError; use std::fmt; -use std::ops::Index; +use std::io; +use std::ops::Deref; use std::panic; use std::ptr; -use lazy_static::lazy_static; use ndarray::ShapeError; -use num_integer::Integer; -use num_traits::{Bounded, Zero}; -use parking_lot::Mutex; +#[cfg(not(feature = "1.10.0"))] +use hdf5_sys::h5::hssize_t; use hdf5_sys::h5e::{ - H5E_error2_t, H5Eclose_stack, H5Eget_current_stack, H5Eget_msg, H5Eprint2, H5Eset_auto2, - H5Ewalk2, H5E_DEFAULT, H5E_WALK_DOWNWARD, + H5E_auto2_t, H5E_error2_t, H5Eget_current_stack, H5Eget_msg, H5Eprint2, H5Eset_auto2, H5Ewalk2, + H5E_DEFAULT, H5E_WALK_DOWNWARD, }; use crate::internal_prelude::*; -#[derive(Clone, Debug)] -pub struct ErrorFrame { - desc: String, - func: String, - major: String, - minor: String, - description: String, -} - -impl ErrorFrame { - pub fn new(desc: &str, func: &str, major: &str, minor: &str) -> Self { - Self { - desc: desc.into(), - func: func.into(), - major: major.into(), - minor: minor.into(), - description: format!("{}(): {}", func, desc), - } - } - - pub fn desc(&self) -> &str { - self.desc.as_ref() - } - - pub fn description(&self) -> &str { - self.description.as_ref() - } - - pub fn detail(&self) -> Option { - Some(format!("Error in {}(): {} [{}: {}]", self.func, self.desc, self.major, self.minor)) - } +/// Silence errors emitted by `hdf5` +/// +/// Safety: This version is not thread-safe and must be syncronised +/// with other calls to `hdf5` 
+pub(crate) unsafe fn silence_errors_no_sync(silence: bool) { + // Cast function with different argument types. This is safe because H5Eprint2 is + // documented to support this interface + let h5eprint: Option herr_t> = + Some(H5Eprint2 as _); + let h5eprint: H5E_auto2_t = std::mem::transmute(h5eprint); + H5Eset_auto2(H5E_DEFAULT, if silence { None } else { h5eprint }, ptr::null_mut()); } -#[must_use] -#[doc(hidden)] -pub struct SilenceErrors; - -impl Default for SilenceErrors { - fn default() -> Self { - Self::new() - } -} - -lazy_static! { - static ref ERROR_HANDLER: Mutex> = Mutex::default(); +/// Silence errors emitted by `hdf5` +pub fn silence_errors(silence: bool) { + h5lock!(silence_errors_no_sync(silence)); } -extern "C" fn default_error_handler(estack: hid_t, _cdata: *mut c_void) -> herr_t { - panic::catch_unwind(|| unsafe { H5Eprint2(estack, ptr::null_mut()) }).unwrap_or(-1) -} +#[repr(transparent)] +#[derive(Clone)] +pub struct ErrorStack(Handle); -impl SilenceErrors { - pub fn new() -> Self { - Self::silence(true); - Self - } +impl ObjectClass for ErrorStack { + const NAME: &'static str = "errorstack"; + const VALID_TYPES: &'static [H5I_type_t] = &[H5I_ERROR_STACK]; - fn silence(on: bool) { - let guard = ERROR_HANDLER.lock(); - let counter = &mut *guard.borrow_mut(); - if on { - *counter += 1; - if *counter == 1 { - h5lock!(H5Eset_auto2(H5E_DEFAULT, None, ptr::null_mut())); - } - } else { - if *counter > 0 { - *counter -= 1; - } - if *counter == 0 { - h5lock!(H5Eset_auto2(H5E_DEFAULT, Some(default_error_handler), ptr::null_mut())); - } - } + fn from_handle(handle: Handle) -> Self { + Self(handle) } -} -impl Drop for SilenceErrors { - fn drop(&mut self) { - Self::silence(false); + fn handle(&self) -> &Handle { + &self.0 } -} - -pub fn silence_errors() -> SilenceErrors { - SilenceErrors::new() -} - -#[derive(Clone, Debug)] -pub struct ErrorStack { - frames: Vec, - description: Option, -} -impl Index for ErrorStack { - type Output = ErrorFrame; - - fn 
index(&self, index: usize) -> &ErrorFrame { - &self.frames[index] - } + // TODO: short_repr() } -impl Default for ErrorStack { - fn default() -> Self { - Self::new() +impl ErrorStack { + pub(crate) fn from_current() -> Result { + let stack_id = h5lock!(H5Eget_current_stack()); + Handle::try_new(stack_id).map(Self) } -} -struct CallbackData { - stack: ErrorStack, - err: Option, -} - -impl ErrorStack { - // This low-level function is not thread-safe and has to be synchronized by the user - pub fn query() -> Result> { + /// Expands the error stack to a format which is easier to handle + // known HDF5 bug: H5Eget_msg() used in this function may corrupt + // the current stack, so we use self over &self + pub fn expand(self) -> Result { + struct CallbackData { + stack: ExpandedErrorStack, + err: Option, + } extern "C" fn callback( _: c_uint, err_desc: *const H5E_error2_t, data: *mut c_void, ) -> herr_t { panic::catch_unwind(|| unsafe { - let data = &mut *(data as *mut CallbackData); + let data = &mut *(data.cast::()); if data.err.is_some() { return 0; } @@ -159,33 +95,77 @@ impl ErrorStack { .unwrap_or(-1) } - let mut data = CallbackData { stack: Self::new(), err: None }; - let data_ptr: *mut c_void = &mut data as *mut _ as *mut _; + let mut data = CallbackData { stack: ExpandedErrorStack::new(), err: None }; + let data_ptr: *mut c_void = (&mut data as *mut CallbackData).cast::(); - // known HDF5 bug: H5Eget_msg() may corrupt the current stack, so we copy it first - let stack_id = h5lock!(H5Eget_current_stack()); - ensure!(stack_id >= 0, "failed to copy the current error stack"); + let stack_id = self.handle().id(); h5lock!({ H5Ewalk2(stack_id, H5E_WALK_DOWNWARD, Some(callback), data_ptr); - H5Eclose_stack(stack_id); }); - match (data.err, data.stack.is_empty()) { - (Some(err), _) => Err(err), - (None, false) => Ok(Some(data.stack)), - (None, true) => Ok(None), + data.err.map_or(Ok(data.stack), Err) + } +} + +#[derive(Clone, Debug)] +pub struct ErrorFrame { + desc: 
String, + func: String, + major: String, + minor: String, + description: String, +} + +impl ErrorFrame { + pub(crate) fn new(desc: &str, func: &str, major: &str, minor: &str) -> Self { + Self { + desc: desc.into(), + func: func.into(), + major: major.into(), + minor: minor.into(), + description: format!("{}(): {}", func, desc), } } - pub fn new() -> Self { - Self { frames: Vec::new(), description: None } + pub fn desc(&self) -> &str { + self.desc.as_ref() + } + + pub fn description(&self) -> &str { + self.description.as_ref() + } + + pub fn detail(&self) -> Option { + Some(format!("Error in {}(): {} [{}: {}]", self.func, self.desc, self.major, self.minor)) + } +} + +#[derive(Clone, Debug)] +pub struct ExpandedErrorStack { + frames: Vec, + description: Option, +} + +impl Deref for ExpandedErrorStack { + type Target = [ErrorFrame]; + + fn deref(&self) -> &Self::Target { + &self.frames + } +} + +impl Default for ExpandedErrorStack { + fn default() -> Self { + Self::new() } +} - pub fn len(&self) -> usize { - self.frames.len() +impl ExpandedErrorStack { + pub(crate) fn new() -> Self { + Self { frames: Vec::new(), description: None } } - pub fn push(&mut self, frame: ErrorFrame) { + pub(crate) fn push(&mut self, frame: ErrorFrame) { self.frames.push(frame); if !self.is_empty() { let top_desc = self.frames[0].description().to_owned(); @@ -198,16 +178,8 @@ impl ErrorStack { } } - pub fn is_empty(&self) -> bool { - self.frames.is_empty() - } - pub fn top(&self) -> Option<&ErrorFrame> { - if self.is_empty() { - None - } else { - Some(&self.frames[0]) - } + self.get(0) } pub fn description(&self) -> &str { @@ -233,21 +205,16 @@ pub enum Error { /// A type for results generated by HDF5-related functions where the `Err` type is /// set to `hdf5::Error`. 
-pub type Result = ::std::result::Result; +pub type Result = ::std::result::Result; impl Error { - pub fn query() -> Option { - match ErrorStack::query() { - Err(err) => Some(err), - Ok(Some(stack)) => Some(Self::HDF5(stack)), - Ok(None) => None, - } - } - - pub fn description(&self) -> &str { - match *self { - Self::Internal(ref desc) => desc.as_ref(), - Self::HDF5(ref stack) => stack.description(), + /// Obtain the current error stack. The stack might be empty, which + /// will result in a valid error stack + pub fn query() -> Result { + if let Ok(stack) = ErrorStack::from_current() { + Ok(Self::HDF5(stack)) + } else { + Err(Self::Internal("Could not get errorstack".to_owned())) } } } @@ -264,18 +231,33 @@ impl From for Error { } } +impl From for Error { + fn from(_: Infallible) -> Self { + unreachable!("Infallible error can never be constructed") + } +} + impl fmt::Debug for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { Self::Internal(ref desc) => f.write_str(desc), - Self::HDF5(ref stack) => f.write_str(stack.description()), + Self::HDF5(ref stack) => match stack.clone().expand() { + Ok(stack) => f.write_str(stack.description()), + Err(_) => f.write_str("Could not get error stack"), + }, } } } impl fmt::Display for Error { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.write_str(self.description()) + match *self { + Self::Internal(ref desc) => f.write_str(desc), + Self::HDF5(ref stack) => match stack.clone().expand() { + Ok(stack) => f.write_str(stack.description()), + Err(_) => f.write_str("Could not get error stack"), + }, + } } } @@ -283,29 +265,66 @@ impl StdError for Error {} impl From for Error { fn from(err: ShapeError) -> Self { - format!("shape error: {}", err.to_string()).into() + format!("shape error: {}", err).into() + } +} + +impl From for io::Error { + fn from(err: Error) -> Self { + Self::new(io::ErrorKind::Other, err) + } +} + +pub fn h5check(value: T) -> Result { + H5ErrorCode::h5check(value) +} + 
+#[allow(unused)] +pub fn is_err_code(value: T) -> bool { + H5ErrorCode::is_err_code(value) +} + +pub trait H5ErrorCode: Copy { + fn is_err_code(value: Self) -> bool; + + fn h5check(value: Self) -> Result { + if Self::is_err_code(value) { + Err(Error::query().unwrap_or_else(|e| e)) + } else { + Ok(value) + } } } -pub fn is_err_code(value: T) -> bool -where - T: Integer + Zero + Bounded + Copy, -{ - if T::min_value() < T::zero() { - value < T::zero() - } else { - value == T::zero() +impl H5ErrorCode for hsize_t { + fn is_err_code(value: Self) -> bool { + value == 0 } } -pub fn h5check(value: T) -> Result -where - T: Integer + Zero + Bounded + Copy, -{ - if is_err_code(value) { - Error::query().map_or_else(|| Ok(value), Err) - } else { - Ok(value) +impl H5ErrorCode for herr_t { + fn is_err_code(value: Self) -> bool { + value < 0 + } +} + +#[cfg(feature = "1.10.0")] +impl H5ErrorCode for hid_t { + fn is_err_code(value: Self) -> bool { + value < 0 + } +} + +#[cfg(not(feature = "1.10.0"))] +impl H5ErrorCode for hssize_t { + fn is_err_code(value: Self) -> bool { + value < 0 + } +} + +impl H5ErrorCode for libc::ssize_t { + fn is_err_code(value: Self) -> bool { + value < 0 } } @@ -316,33 +335,44 @@ pub mod tests { use crate::globals::H5P_ROOT; use crate::internal_prelude::*; - use super::ErrorStack; + use super::ExpandedErrorStack; #[test] pub fn test_error_stack() { - let _e = silence_errors(); - - let result_no_error = h5lock!({ + let stack = h5lock!({ let plist_id = H5Pcreate(*H5P_ROOT); H5Pclose(plist_id); - ErrorStack::query() - }); - assert!(result_no_error.ok().unwrap().is_none()); + Error::query() + }) + .unwrap(); + let stack = match stack { + Error::HDF5(stack) => stack, + Error::Internal(internal) => panic!("Expected hdf5 error, not {}", internal), + } + .expand() + .unwrap(); + assert!(stack.is_empty()); - let result_error = h5lock!({ + let stack = h5lock!({ let plist_id = H5Pcreate(*H5P_ROOT); H5Pclose(plist_id); H5Pclose(plist_id); - ErrorStack::query() - }); 
- let stack = result_error.ok().unwrap().unwrap(); + Error::query() + }) + .unwrap(); + let stack = match stack { + Error::HDF5(stack) => stack, + Error::Internal(internal) => panic!("Expected hdf5 error, not {}", internal), + } + .expand() + .unwrap(); assert_eq!(stack.description(), "H5Pclose(): can't close: can't locate ID"); assert_eq!( &stack.detail().unwrap(), "Error in H5Pclose(): can't close [Property lists: Unable to free object]" ); - assert!(stack.len() >= 2 && stack.len() <= 3); // depending on HDF5 version + assert!(stack.len() >= 2 && stack.len() <= 4); // depending on HDF5 version assert!(!stack.is_empty()); assert_eq!(stack[0].description(), "H5Pclose(): can't close"); @@ -352,22 +382,32 @@ pub mod tests { [Property lists: Unable to free object]" ); - assert_eq!(stack[stack.len() - 1].description(), "H5I_dec_ref(): can't locate ID"); - assert_eq!( - &stack[stack.len() - 1].detail().unwrap(), - "Error in H5I_dec_ref(): can't locate ID \ + #[cfg(not(feature = "1.13.0"))] + { + assert_eq!(stack[stack.len() - 1].description(), "H5I_dec_ref(): can't locate ID"); + assert_eq!( + &stack[stack.len() - 1].detail().unwrap(), + "Error in H5I_dec_ref(): can't locate ID \ [Object atom: Unable to find atom information (already closed?)]" - ); + ); + } + #[cfg(feature = "1.13.0")] + { + assert_eq!(stack[stack.len() - 1].description(), "H5I__dec_ref(): can't locate ID"); + assert_eq!( + &stack[stack.len() - 1].detail().unwrap(), + "Error in H5I__dec_ref(): can't locate ID \ + [Object ID: Unable to find ID information (already closed?)]" + ); + } - let empty_stack = ErrorStack::new(); + let empty_stack = ExpandedErrorStack::new(); assert!(empty_stack.is_empty()); assert_eq!(empty_stack.len(), 0); } #[test] pub fn test_h5call() { - let _e = silence_errors(); - let result_no_error = h5call!({ let plist_id = H5Pcreate(*H5P_ROOT); H5Pclose(plist_id) @@ -384,8 +424,6 @@ pub mod tests { #[test] pub fn test_h5try() { - let _e = silence_errors(); - fn f1() -> Result { 
h5try!(H5Pcreate(*H5P_ROOT)); Ok(100) diff --git a/src/filters.rs b/src/filters.rs deleted file mode 100644 index 6d8bde5fe..000000000 --- a/src/filters.rs +++ /dev/null @@ -1,472 +0,0 @@ -use crate::globals::H5P_DATASET_CREATE; -use crate::internal_prelude::*; - -use hdf5_sys::{ - h5p::{ - H5Pcreate, H5Pget_filter2, H5Pget_nfilters, H5Pset_deflate, H5Pset_fletcher32, - H5Pset_scaleoffset, H5Pset_shuffle, H5Pset_szip, - }, - h5t::{H5Tget_class, H5T_FLOAT, H5T_INTEGER}, - h5z::{ - H5Z_filter_t, H5Zfilter_avail, H5Zget_filter_info, H5Z_FILTER_CONFIG_DECODE_ENABLED, - H5Z_FILTER_CONFIG_ENCODE_ENABLED, H5Z_FILTER_DEFLATE, H5Z_FILTER_FLETCHER32, - H5Z_FILTER_SCALEOFFSET, H5Z_FILTER_SHUFFLE, H5Z_FILTER_SZIP, H5Z_SO_FLOAT_DSCALE, - H5Z_SO_INT, H5_SZIP_EC_OPTION_MASK, H5_SZIP_NN_OPTION_MASK, - }, -}; - -/// Returns `true` if gzip filter is available. -pub fn gzip_available() -> bool { - h5lock!(H5Zfilter_avail(H5Z_FILTER_DEFLATE) == 1) -} - -/// Returns `true` if szip filter is available. -pub fn szip_available() -> bool { - h5lock!(H5Zfilter_avail(H5Z_FILTER_SZIP) == 1) -} - -/// HDF5 filters and compression options. -#[derive(Clone, PartialEq, Debug)] -pub struct Filters { - gzip: Option, - szip: Option<(bool, u8)>, - shuffle: bool, - fletcher32: bool, - scale_offset: Option, -} - -impl Default for Filters { - fn default() -> Self { - Self { gzip: None, szip: None, shuffle: false, fletcher32: false, scale_offset: None } - } -} - -impl Filters { - pub fn new() -> Self { - Self::default() - } - - /// Enable gzip compression with a specified level (0-9). - pub fn gzip(&mut self, level: u8) -> &mut Self { - self.gzip = Some(level); - self - } - - /// Disable gzip compression. - pub fn no_gzip(&mut self) -> &mut Self { - self.gzip = None; - self - } - - /// Get the current settings for gzip filter. - pub fn get_gzip(&self) -> Option { - self.gzip - } - - /// Enable szip compression with a specified method (EC, NN) and level (0-32). 
- /// - /// If `nn` if set to `true` (default), the nearest neighbor method is used, otherwise - /// the method is set to entropy coding. - pub fn szip(&mut self, nn: bool, level: u8) -> &mut Self { - self.szip = Some((nn, level)); - self - } - - /// Disable szip compression. - pub fn no_szip(&mut self) -> &mut Self { - self.szip = None; - self - } - - /// Get the current settings for szip filter. - /// - /// Returns a tuple `(nn, level)`, where `nn` indicates whether the nearest neighbor - /// method is used and `level` is the associated compression level. - pub fn get_szip(&self) -> Option<(bool, u8)> { - self.szip - } - - /// Enable or disable shuffle filter. - pub fn shuffle(&mut self, shuffle: bool) -> &mut Self { - self.shuffle = shuffle; - self - } - - /// Get the current settings for shuffle filter. - pub fn get_shuffle(&self) -> bool { - self.shuffle - } - - /// Enable or disable fletcher32 filter. - pub fn fletcher32(&mut self, fletcher32: bool) -> &mut Self { - self.fletcher32 = fletcher32; - self - } - - /// Get the current settings for fletcher32 filter. - pub fn get_fletcher32(&self) -> bool { - self.fletcher32 - } - - /// Enable scale-offset filter with a specified factor (0 means automatic). - pub fn scale_offset(&mut self, scale_offset: u32) -> &mut Self { - self.scale_offset = Some(scale_offset); - self - } - - /// Disable scale-offset compression. - pub fn no_scale_offset(&mut self) -> &mut Self { - self.scale_offset = None; - self - } - - /// Get the current settings for scale-offset filter. - pub fn get_scale_offset(&self) -> Option { - self.scale_offset - } - - /// Enable gzip filter with default settings (compression level 4). - pub fn gzip_default(&mut self) -> &mut Self { - self.gzip = Some(4); - self - } - - /// Enable szip filter with default settings (NN method, compression level 8). 
- pub fn szip_default(&mut self) -> &mut Self { - self.szip = Some((true, 8)); - self - } - - /// Returns `true` if any filters are enabled and thus chunkins is required. - pub fn has_filters(&self) -> bool { - self.gzip.is_some() - || self.szip.is_some() - || self.shuffle - || self.fletcher32 - || self.scale_offset.is_some() - } - - /// Verify whether the filters configuration is valid. - pub fn validate(&self) -> Result<()> { - if self.gzip.is_some() && self.szip.is_some() { - fail!("Cannot specify two compression options at once.") - } - if let Some(level) = self.gzip { - ensure!(level <= 9, "Invalid level for gzip compression, expected 0-9 integer."); - } - if let Some((_, pixels_per_block)) = self.szip { - ensure!( - pixels_per_block <= 32 && pixels_per_block % 2 == 0, - "Invalid pixels per block for szip compression, expected even 0-32 integer." - ); - } - if let Some(offset) = self.scale_offset { - ensure!( - offset <= c_int::max_value() as _, - "Scale-offset factor too large, maximum is {}.", - c_int::max_value() - ); - } - if self.scale_offset.is_some() && self.fletcher32 { - fail!("Cannot use lossy scale-offset filter with fletcher32."); - } - Ok(()) - } - - #[doc(hidden)] - pub fn from_dcpl(dcpl: &PropertyList) -> Result { - let mut filters = Self::default(); - h5lock!({ - let id = dcpl.id(); - let n_filters: c_int = h5try!(H5Pget_nfilters(id)); - - for idx in 0..n_filters { - let flags: *mut c_uint = &mut 0; - let n_elements: *mut size_t = &mut 16; - - let mut values: Vec = Vec::with_capacity(16); - values.set_len(16); - - let mut name: Vec = Vec::with_capacity(256); - name.set_len(256); - - let filter_config: *mut c_uint = &mut 0; - - let code = H5Pget_filter2( - id, - idx as _, - flags, - n_elements, - values.as_mut_ptr(), - 256, - name.as_mut_ptr(), - filter_config, - ); - name.push(0); - - let v0 = values.get(0).cloned().unwrap_or(0); - let v1 = values.get(1).cloned().unwrap_or(0); - - match code { - H5Z_FILTER_DEFLATE => { - filters.gzip(v0 as _); 
- } - H5Z_FILTER_SZIP => { - let nn = match v0 { - v if v & H5_SZIP_EC_OPTION_MASK != 0 => false, - v if v & H5_SZIP_NN_OPTION_MASK != 0 => true, - _ => fail!("Unknown szip method: {:?}", v0), - }; - filters.szip(nn, v1 as _); - } - H5Z_FILTER_SHUFFLE => { - filters.shuffle(true); - } - H5Z_FILTER_FLETCHER32 => { - filters.fletcher32(true); - } - H5Z_FILTER_SCALEOFFSET => { - filters.scale_offset(v1); - } - _ => fail!("Unsupported filter: {:?}", code), - }; - } - - Ok(()) - }) - .and(filters.validate().and(Ok(filters))) - } - - fn ensure_available(name: &str, code: H5Z_filter_t) -> Result<()> { - ensure!(h5lock!(H5Zfilter_avail(code) == 1), "Filter not available: {}", name); - - let flags: *mut c_uint = &mut 0; - h5try!(H5Zget_filter_info(code, flags)); - - ensure!( - unsafe { *flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED != 0 }, - "Encoding is not enabled for filter: {}", - name - ); - ensure!( - unsafe { *flags & H5Z_FILTER_CONFIG_DECODE_ENABLED != 0 }, - "Decoding is not enabled for filter: {}", - name - ); - Ok(()) - } - - #[doc(hidden)] - pub fn to_dcpl(&self, datatype: &Datatype) -> Result { - self.validate()?; - - h5lock!({ - let plist = PropertyList::from_id(H5Pcreate(*H5P_DATASET_CREATE))?; - let id = plist.id(); - - // fletcher32 - if self.fletcher32 { - Self::ensure_available("fletcher32", H5Z_FILTER_FLETCHER32)?; - H5Pset_fletcher32(id); - } - - // scale-offset - if let Some(offset) = self.scale_offset { - Self::ensure_available("scaleoffset", H5Z_FILTER_SCALEOFFSET)?; - match H5Tget_class(datatype.id()) { - H5T_INTEGER => { - H5Pset_scaleoffset(id, H5Z_SO_INT, offset as _); - } - H5T_FLOAT => { - ensure!( - offset > 0, - "Can only use positive scale-offset factor with floats" - ); - H5Pset_scaleoffset(id, H5Z_SO_FLOAT_DSCALE, offset as _); - } - _ => { - fail!("Can only use scale/offset with integer/float datatypes."); - } - } - } - - // shuffle - if self.shuffle { - Self::ensure_available("shuffle", H5Z_FILTER_SHUFFLE)?; - h5try!(H5Pset_shuffle(id)); - } 
- - // compression - if let Some(level) = self.gzip { - Self::ensure_available("gzip", H5Z_FILTER_DEFLATE)?; - h5try!(H5Pset_deflate(id, c_uint::from(level))); - } else if let Some((nn, pixels_per_block)) = self.szip { - Self::ensure_available("szip", H5Z_FILTER_SZIP)?; - let options = if nn { H5_SZIP_NN_OPTION_MASK } else { H5_SZIP_EC_OPTION_MASK }; - h5try!(H5Pset_szip(id, options, c_uint::from(pixels_per_block))); - } - - Ok(plist) - }) - } -} - -#[cfg(test)] -pub mod tests { - use super::{gzip_available, szip_available}; - use crate::internal_prelude::*; - - fn make_filters(filters: &Filters) -> Result { - let datatype = Datatype::from_type::().unwrap(); - let dcpl = filters.to_dcpl(&datatype)?; - Filters::from_dcpl(&dcpl) - } - - fn check_roundtrip(filters: &Filters) { - assert_eq!(make_filters::(filters).unwrap(), *filters); - } - - #[test] - pub fn test_szip() { - let _e = silence_errors(); - - if !szip_available() { - assert_err!( - make_filters::(&Filters::new().szip_default()), - "Filter not available: szip" - ); - } else { - assert!(Filters::new().get_szip().is_none()); - assert_eq!(Filters::new().szip(false, 4).get_szip(), Some((false, 4))); - assert!(Filters::new().szip(false, 4).no_szip().get_szip().is_none()); - assert_eq!(Filters::new().szip_default().get_szip(), Some((true, 8))); - - check_roundtrip::(Filters::new().no_szip()); - check_roundtrip::(Filters::new().szip(false, 4)); - check_roundtrip::(Filters::new().szip(true, 4)); - - check_roundtrip::(Filters::new().no_szip()); - check_roundtrip::(Filters::new().szip(false, 4)); - check_roundtrip::(Filters::new().szip(true, 4)); - - assert_err!( - make_filters::(&Filters::new().szip(false, 1)), - "Invalid pixels per block for szip compression" - ); - assert_err!( - make_filters::(&Filters::new().szip(true, 34)), - "Invalid pixels per block for szip compression" - ); - } - } - - #[test] - pub fn test_gzip() { - let _e = silence_errors(); - - if !gzip_available() { - assert_err!( - 
make_filters::(&Filters::new().gzip_default()), - "Filter not available: gzip" - ); - } else { - assert!(Filters::new().get_gzip().is_none()); - assert_eq!(Filters::new().gzip(7).get_gzip(), Some(7)); - assert!(Filters::new().gzip(7).no_gzip().get_gzip().is_none()); - assert_eq!(Filters::new().gzip_default().get_gzip(), Some(4)); - - check_roundtrip::(Filters::new().no_gzip()); - check_roundtrip::(Filters::new().gzip(7)); - - check_roundtrip::(Filters::new().no_gzip()); - check_roundtrip::(Filters::new().gzip(7)); - - assert_err!( - make_filters::(&Filters::new().gzip_default().szip_default()), - "Cannot specify two compression options at once" - ); - assert_err!( - make_filters::(&Filters::new().gzip(42)), - "Invalid level for gzip compression" - ); - } - } - - #[test] - pub fn test_shuffle() { - assert!(!Filters::new().get_shuffle()); - assert!(Filters::new().shuffle(true).get_shuffle()); - assert!(!Filters::new().shuffle(true).shuffle(false).get_shuffle()); - - check_roundtrip::(Filters::new().shuffle(false)); - check_roundtrip::(Filters::new().shuffle(true)); - - check_roundtrip::(Filters::new().shuffle(false)); - check_roundtrip::(Filters::new().shuffle(true)); - } - - #[test] - pub fn test_fletcher32() { - assert!(!Filters::new().get_fletcher32()); - assert!(Filters::new().fletcher32(true).get_fletcher32()); - assert!(!Filters::new().fletcher32(true).fletcher32(false).get_fletcher32()); - - check_roundtrip::(Filters::new().fletcher32(false)); - check_roundtrip::(Filters::new().fletcher32(true)); - - check_roundtrip::(Filters::new().fletcher32(false)); - check_roundtrip::(Filters::new().fletcher32(true)); - } - - #[test] - pub fn test_scale_offset() { - let _e = silence_errors(); - - assert!(Filters::new().get_scale_offset().is_none()); - assert_eq!(Filters::new().scale_offset(8).get_scale_offset(), Some(8)); - assert!(Filters::new().scale_offset(8).no_scale_offset().get_scale_offset().is_none()); - - check_roundtrip::(Filters::new().no_scale_offset()); - 
check_roundtrip::(Filters::new().scale_offset(0)); - check_roundtrip::(Filters::new().scale_offset(8)); - - check_roundtrip::(Filters::new().no_scale_offset()); - assert_err!( - make_filters::(&Filters::new().scale_offset(0)), - "Can only use positive scale-offset factor with floats" - ); - check_roundtrip::(Filters::new().scale_offset(8)); - - assert_err!( - make_filters::(&Filters::new().scale_offset(u32::max_value())), - "Scale-offset factor too large" - ); - assert_err!( - make_filters::(&Filters::new().scale_offset(0).fletcher32(true)), - "Cannot use lossy scale-offset filter with fletcher32" - ); - } - - #[test] - pub fn test_filters_dcpl() { - let mut filters = Filters::new(); - filters.shuffle(true); - if gzip_available() { - filters.gzip_default(); - } - let datatype = Datatype::from_type::().unwrap(); - let dcpl = filters.to_dcpl(&datatype).unwrap(); - let filters2 = Filters::from_dcpl(&dcpl).unwrap(); - assert_eq!(filters2, filters); - } - - #[test] - pub fn test_has_filters() { - assert_eq!(Filters::default().has_filters(), false); - assert_eq!(Filters::default().gzip_default().has_filters(), true); - assert_eq!(Filters::default().szip_default().has_filters(), true); - assert_eq!(Filters::default().fletcher32(true).has_filters(), true); - assert_eq!(Filters::default().shuffle(true).has_filters(), true); - assert_eq!(Filters::default().scale_offset(2).has_filters(), true); - } -} diff --git a/src/globals.rs b/src/globals.rs index 24c4ef06f..b705055d3 100644 --- a/src/globals.rs +++ b/src/globals.rs @@ -4,39 +4,44 @@ use std::mem; use lazy_static::lazy_static; -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] use hdf5_sys::h5fd::H5FD_direct_init; -#[cfg(h5_have_parallel)] +#[cfg(feature = "have-parallel")] use hdf5_sys::h5fd::H5FD_mpio_init; use hdf5_sys::h5fd::{ H5FD_core_init, H5FD_family_init, H5FD_log_init, H5FD_multi_init, H5FD_sec2_init, H5FD_stdio_init, }; +use hdf5_sys::{h5e, h5p, h5t}; use crate::internal_prelude::*; 
-#[cfg(not(h5_dll_indirection))] -macro_rules! link_hid { - ($rust_name:ident, $mod_name:ident::$c_name:ident) => { - lazy_static! { - pub static ref $rust_name: ::hdf5_sys::h5i::hid_t = { - h5lock!(::hdf5_sys::h5::H5open()); - *::hdf5_sys::$mod_name::$c_name - }; +pub struct H5GlobalConstant( + #[cfg(msvc_dll_indirection)] &'static usize, + #[cfg(not(msvc_dll_indirection))] &'static hdf5_sys::h5i::hid_t, +); + +impl std::ops::Deref for H5GlobalConstant { + type Target = hdf5_sys::h5i::hid_t; + fn deref(&self) -> &Self::Target { + lazy_static::initialize(&crate::sync::LIBRARY_INIT); + cfg_if::cfg_if! { + if #[cfg(msvc_dll_indirection)] { + let dll_ptr = self.0 as *const usize; + let ptr: *const *const hdf5_sys::h5i::hid_t = dll_ptr.cast(); + unsafe { + &**ptr + } + } else { + self.0 + } } - }; + } } -// God damn dllimport... -#[cfg(h5_dll_indirection)] macro_rules! link_hid { - ($rust_name:ident, $mod_name:ident::$c_name:ident) => { - lazy_static! { - pub static ref $rust_name: ::hdf5_sys::h5i::hid_t = { - h5lock!(::hdf5_sys::h5::H5open()); - unsafe { *(*::hdf5_sys::$mod_name::$c_name as *const _) } - }; - } + ($rust_name:ident, $c_name:path) => { + pub static $rust_name: H5GlobalConstant = H5GlobalConstant($c_name); }; } @@ -185,6 +190,7 @@ link_hid!(H5E_DATATYPE, h5e::H5E_DATATYPE); link_hid!(H5E_RS, h5e::H5E_RS); link_hid!(H5E_HEAP, h5e::H5E_HEAP); link_hid!(H5E_OHDR, h5e::H5E_OHDR); +#[cfg(not(feature = "1.13.0"))] link_hid!(H5E_ATOM, h5e::H5E_ATOM); link_hid!(H5E_ATTR, h5e::H5E_ATTR); link_hid!(H5E_NONE_MAJOR, h5e::H5E_NONE_MAJOR); @@ -260,6 +266,7 @@ link_hid!(H5E_NOTHDF5, h5e::H5E_NOTHDF5); link_hid!(H5E_BADFILE, h5e::H5E_BADFILE); link_hid!(H5E_TRUNCATED, h5e::H5E_TRUNCATED); link_hid!(H5E_MOUNT, h5e::H5E_MOUNT); +#[cfg(not(feature = "1.13.0"))] link_hid!(H5E_BADATOM, h5e::H5E_BADATOM); link_hid!(H5E_BADGROUP, h5e::H5E_BADGROUP); link_hid!(H5E_CANTREGISTER, h5e::H5E_CANTREGISTER); @@ -331,21 +338,21 @@ lazy_static! 
{ } // MPI-IO file driver -#[cfg(h5_have_parallel)] +#[cfg(feature = "have-parallel")] lazy_static! { pub static ref H5FD_MPIO: hid_t = unsafe { h5lock!(H5FD_mpio_init()) }; } -#[cfg(not(h5_have_parallel))] +#[cfg(not(feature = "have-parallel"))] lazy_static! { pub static ref H5FD_MPIO: hid_t = H5I_INVALID_HID; } // Direct VFD -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] lazy_static! { pub static ref H5FD_DIRECT: hid_t = unsafe { h5lock!(H5FD_direct_init()) }; } -#[cfg(not(h5_have_direct))] +#[cfg(not(feature = "have-direct"))] lazy_static! { pub static ref H5FD_DIRECT: hid_t = H5I_INVALID_HID; } diff --git a/src/handle.rs b/src/handle.rs index 67e0b62b4..4eeb05300 100644 --- a/src/handle.rs +++ b/src/handle.rs @@ -1,133 +1,95 @@ -use std::collections::HashMap; -use std::sync::Arc; +use std::mem; -use lazy_static::lazy_static; -use parking_lot::{Mutex, RwLock}; - -use hdf5_sys::h5i::{H5I_type_t, H5Idec_ref, H5Iget_type, H5Iinc_ref, H5Iis_valid}; +use hdf5_sys::h5i::{H5I_type_t, H5Idec_ref, H5Iget_ref, H5Iget_type, H5Iinc_ref, H5Iis_valid}; use crate::internal_prelude::*; -pub fn get_id_type(id: hid_t) -> H5I_type_t { - h5lock!({ - let tp = h5lock!(H5Iget_type(id)); - let valid = id > 0 && tp > H5I_BADID && tp < H5I_NTYPES; - if valid { - tp - } else { - H5I_BADID - } - }) -} - -pub fn is_valid_id(id: hid_t) -> bool { - h5lock!({ - let tp = get_id_type(id); - tp > H5I_BADID && tp < H5I_NTYPES - }) -} - -pub fn is_valid_user_id(id: hid_t) -> bool { - h5lock!({ H5Iis_valid(id) == 1 }) -} - -struct Registry { - registry: Mutex>>>, -} - -impl Default for Registry { - fn default() -> Self { - Self::new() - } -} - -impl Registry { - pub fn new() -> Self { - Self { registry: Mutex::new(HashMap::new()) } - } - - pub fn new_handle(&self, id: hid_t) -> Arc> { - let mut registry = self.registry.lock(); - let handle = registry.entry(id).or_insert_with(|| Arc::new(RwLock::new(id))); - if *handle.read() != id { - // an id may be left dangling by previous invalidation 
of a linked handle - *handle = Arc::new(RwLock::new(id)); - } - handle.clone() - } -} - +/// A handle to an HDF5 object +#[derive(Debug)] pub struct Handle { - id: Arc>, + id: hid_t, } impl Handle { + /// Create a handle from object ID, taking ownership of it pub fn try_new(id: hid_t) -> Result { - lazy_static! { - static ref REGISTRY: Registry = Registry::new(); + let handle = Self { id }; + if handle.is_valid_user_id() { + Ok(handle) + } else { + // Drop on an invalid handle could cause closing an unrelated object + // in the destructor, hence it's important to prevent the drop here. + mem::forget(handle); + Err(From::from(format!("Invalid handle id: {}", id))) } - h5lock!({ - if is_valid_user_id(id) { - Ok(Self { id: REGISTRY.new_handle(id) }) - } else { - Err(From::from(format!("Invalid handle id: {}", id))) - } - }) } - pub fn invalid() -> Self { - Self { id: Arc::new(RwLock::new(H5I_INVALID_HID)) } + /// Create a handle from object ID by cloning it + pub fn try_borrow(id: hid_t) -> Result { + // It's ok to just call try_new() since it may not decref the object + let handle = Self::try_new(id)?; + handle.incref(); + Ok(handle) } - pub fn id(&self) -> hid_t { - *self.id.read() + pub const fn invalid() -> Self { + Self { id: H5I_INVALID_HID } } - pub fn invalidate(&self) { - *self.id.write() = H5I_INVALID_HID; + pub const fn id(&self) -> hid_t { + self.id } + /// Increment the reference count of the handle pub fn incref(&self) { - if is_valid_user_id(self.id()) { - h5lock!(H5Iinc_ref(self.id())); + if self.is_valid_user_id() { + h5lock!(H5Iinc_ref(self.id)); } } + /// Decrease the reference count of the handle + /// + /// Note: This function should only be used if `incref` has been + /// previously called. 
pub fn decref(&self) { h5lock!({ if self.is_valid_id() { - H5Idec_ref(self.id()); + H5Idec_ref(self.id); } - // must invalidate all linked IDs because the library reuses them internally - if !self.is_valid_user_id() && !self.is_valid_id() { - self.invalidate(); - } - }) + }); } /// Returns `true` if the object has a valid unlocked identifier (`false` for pre-defined /// locked identifiers like property list classes). pub fn is_valid_user_id(&self) -> bool { - is_valid_user_id(self.id()) + h5lock!(H5Iis_valid(self.id)) == 1 } pub fn is_valid_id(&self) -> bool { - is_valid_id(self.id()) + matches!(self.id_type(), tp if tp > H5I_BADID && tp < H5I_NTYPES) + } + + /// Return the reference count of the object + pub fn refcount(&self) -> u32 { + h5call!(H5Iget_ref(self.id)).map(|x| x as _).unwrap_or(0) as _ } - pub fn decref_full(&self) { - while self.is_valid_user_id() { - self.decref(); + /// Get HDF5 object type as a native enum. + pub fn id_type(&self) -> H5I_type_t { + if self.id <= 0 { + H5I_BADID + } else { + match h5lock!(H5Iget_type(self.id)) { + tp if tp > H5I_BADID && tp < H5I_NTYPES => tp, + _ => H5I_BADID, + } } } } impl Clone for Handle { fn clone(&self) -> Self { - h5lock!({ - self.incref(); - Self::try_new(self.id()).unwrap_or_else(|_| Self::invalid()) - }) + Self::try_borrow(self.id).unwrap_or_else(|_| Self::invalid()) } } diff --git a/src/hl.rs b/src/hl.rs new file mode 100644 index 000000000..5c5081777 --- /dev/null +++ b/src/hl.rs @@ -0,0 +1,31 @@ +pub mod attribute; +pub mod container; +pub mod dataset; +pub mod dataspace; +pub mod datatype; +pub mod extents; +pub mod file; +pub mod filters; +pub mod group; +pub mod location; +pub mod object; +pub mod plist; +pub mod selection; + +pub use self::{ + attribute::{ + Attribute, AttributeBuilder, AttributeBuilderData, AttributeBuilderEmpty, + AttributeBuilderEmptyShape, + }, + container::{ByteReader, Container, Reader, Writer}, + dataset::{ + Dataset, DatasetBuilder, DatasetBuilderData, 
DatasetBuilderEmpty, DatasetBuilderEmptyShape, + }, + dataspace::Dataspace, + datatype::{Conversion, Datatype}, + file::{File, FileBuilder, OpenMode}, + group::{Group, LinkInfo, LinkType}, + location::{Location, LocationInfo, LocationToken, LocationType}, + object::Object, + plist::PropertyList, +}; diff --git a/src/hl/attribute.rs b/src/hl/attribute.rs new file mode 100644 index 000000000..0f700fe0b --- /dev/null +++ b/src/hl/attribute.rs @@ -0,0 +1,430 @@ +use std::fmt::{self, Debug}; +use std::ops::Deref; + +use hdf5_sys::{ + h5::{H5_index_t, H5_iter_order_t}, + h5a::{H5A_info_t, H5A_operator2_t, H5Acreate2, H5Adelete, H5Aiterate2}, +}; +use hdf5_types::TypeDescriptor; +use ndarray::ArrayView; + +use crate::internal_prelude::*; + +/// Represents the HDF5 attribute object. +#[repr(transparent)] +#[derive(Clone)] +pub struct Attribute(Handle); + +impl ObjectClass for Attribute { + const NAME: &'static str = "attribute"; + const VALID_TYPES: &'static [H5I_type_t] = &[H5I_ATTR]; + + fn from_handle(handle: Handle) -> Self { + Self(handle) + } + + fn handle(&self) -> &Handle { + &self.0 + } + + // TODO: short_repr() +} + +impl Debug for Attribute { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.debug_fmt(f) + } +} + +impl Deref for Attribute { + type Target = Container; + + fn deref(&self) -> &Container { + unsafe { self.transmute() } + } +} + +impl Attribute { + /// Returns names of all the members in the group, non-recursively. 
+ pub fn attr_names(obj: &Location) -> Result> { + extern "C" fn attributes_callback( + _id: hid_t, attr_name: *const c_char, _info: *const H5A_info_t, op_data: *mut c_void, + ) -> herr_t { + std::panic::catch_unwind(|| { + let other_data: &mut Vec = + unsafe { &mut *(op_data.cast::>()) }; + other_data.push(string_from_cstr(attr_name)); + 0 // Continue iteration + }) + .unwrap_or(-1) + } + + let callback_fn: H5A_operator2_t = Some(attributes_callback); + let iteration_position: *mut hsize_t = &mut { 0_u64 }; + let mut result: Vec = Vec::new(); + let other_data: *mut c_void = &mut result as *const _ as *mut c_void; + + h5call!(H5Aiterate2( + obj.handle().id(), + H5_index_t::H5_INDEX_NAME, + H5_iter_order_t::H5_ITER_INC, + iteration_position, + callback_fn, + other_data + ))?; + + Ok(result) + } +} + +#[derive(Clone)] +/// An attribute builder +pub struct AttributeBuilder { + builder: AttributeBuilderInner, +} + +impl AttributeBuilder { + pub fn new(parent: &Location) -> Self { + Self { builder: AttributeBuilderInner::new(parent) } + } + + pub fn empty(self) -> AttributeBuilderEmpty { + self.empty_as(&T::type_descriptor()) + } + + pub fn empty_as(self, type_desc: &TypeDescriptor) -> AttributeBuilderEmpty { + AttributeBuilderEmpty { builder: self.builder, type_desc: type_desc.clone() } + } + + pub fn with_data<'d, A, T, D>(self, data: A) -> AttributeBuilderData<'d, T, D> + where + A: Into>, + T: H5Type, + D: ndarray::Dimension, + { + self.with_data_as::(data, &T::type_descriptor()) + } + + pub fn with_data_as<'d, A, T, D>( + self, data: A, type_desc: &TypeDescriptor, + ) -> AttributeBuilderData<'d, T, D> + where + A: Into>, + T: H5Type, + D: ndarray::Dimension, + { + AttributeBuilderData { + builder: self.builder, + data: data.into(), + type_desc: type_desc.clone(), + conv: Conversion::Soft, + } + } + + #[inline] + #[must_use] + pub fn packed(mut self, packed: bool) -> Self { + self.builder.packed(packed); + self + } +} + +#[derive(Clone)] +/// An attribute builder 
with the type known +pub struct AttributeBuilderEmpty { + builder: AttributeBuilderInner, + type_desc: TypeDescriptor, +} + +impl AttributeBuilderEmpty { + pub fn shape>(self, extents: S) -> AttributeBuilderEmptyShape { + AttributeBuilderEmptyShape { + builder: self.builder, + type_desc: self.type_desc, + extents: extents.into(), + } + } + pub fn create<'n, T: Into<&'n str>>(self, name: T) -> Result { + self.shape(()).create(name) + } + + #[inline] + #[must_use] + pub fn packed(mut self, packed: bool) -> Self { + self.builder.packed(packed); + self + } +} + +#[derive(Clone)] +/// An attribute builder with type and shape known +pub struct AttributeBuilderEmptyShape { + builder: AttributeBuilderInner, + type_desc: TypeDescriptor, + extents: Extents, +} + +impl AttributeBuilderEmptyShape { + pub fn create<'n, T: Into<&'n str>>(&self, name: T) -> Result { + h5lock!(self.builder.create(&self.type_desc, name.into(), &self.extents)) + } + + #[inline] + #[must_use] + pub fn packed(mut self, packed: bool) -> Self { + self.builder.packed(packed); + self + } +} + +#[derive(Clone)] +/// An attribute builder with type, shape, and data known +pub struct AttributeBuilderData<'d, T, D> { + builder: AttributeBuilderInner, + data: ArrayView<'d, T, D>, + type_desc: TypeDescriptor, + conv: Conversion, +} + +impl<'d, T, D> AttributeBuilderData<'d, T, D> +where + T: H5Type, + D: ndarray::Dimension, +{ + /// Set maximum allowed conversion level. + pub fn conversion(mut self, conv: Conversion) -> Self { + self.conv = conv; + self + } + + /// Disallow all conversions. 
+ pub fn no_convert(mut self) -> Self { + self.conv = Conversion::NoOp; + self + } + + pub fn create<'n, N: Into<&'n str>>(&self, name: N) -> Result { + ensure!( + self.data.is_standard_layout(), + "input array is not in standard layout or is not contiguous" + ); // TODO: relax this when it's supported in the writer + let extents = Extents::from(self.data.shape()); + let name = name.into(); + + h5lock!({ + let dtype_src = Datatype::from_type::()?; + let dtype_dst = Datatype::from_descriptor(&self.type_desc)?; + dtype_src.ensure_convertible(&dtype_dst, self.conv)?; + let ds = self.builder.create(&self.type_desc, name, &extents)?; + if let Err(err) = ds.write(self.data.view()) { + self.builder.try_unlink(name); + Err(err) + } else { + Ok(ds) + } + }) + } + + #[inline] + #[must_use] + pub fn packed(mut self, packed: bool) -> Self { + self.builder.packed(packed); + self + } +} + +#[derive(Clone)] +/// The true internal dataset builder +struct AttributeBuilderInner { + parent: Result, + packed: bool, +} + +impl AttributeBuilderInner { + pub fn new(parent: &Location) -> Self { + Self { parent: parent.try_borrow(), packed: false } + } + + pub fn packed(&mut self, packed: bool) { + self.packed = packed; + } + + unsafe fn create( + &self, desc: &TypeDescriptor, name: &str, extents: &Extents, + ) -> Result { + // construct in-file type descriptor; convert to packed representation if needed + let desc = if self.packed { desc.to_packed_repr() } else { desc.to_c_repr() }; + + let datatype = Datatype::from_descriptor(&desc)?; + let parent = try_ref_clone!(self.parent); + + let dataspace = Dataspace::try_new(extents)?; + + let name = to_cstring(name)?; + Attribute::from_id(h5try!(H5Acreate2( + parent.id(), + name.as_ptr(), + datatype.id(), + dataspace.id(), + // these args are currently unused as if HDF5 1.12 + // see details: https://portal.hdfgroup.org/display/HDF5/H5A_CREATE2 + H5P_DEFAULT, + H5P_DEFAULT, + ))) + } + + fn try_unlink(&self, name: &str) { + let name = 
to_cstring(name).unwrap(); + if let Ok(parent) = &self.parent { + h5lock!(H5Adelete(parent.id(), name.as_ptr())); + } + } +} + +#[cfg(test)] +pub mod attribute_tests { + use crate::internal_prelude::*; + use ndarray::{arr2, Array2}; + use std::str::FromStr; + use types::VarLenUnicode; + + #[test] + pub fn test_shape_ndim_size() { + with_tmp_file(|file| { + let d = file.new_attr::().shape((2, 3)).create("name1").unwrap(); + assert_eq!(d.shape(), vec![2, 3]); + assert_eq!(d.size(), 6); + assert_eq!(d.ndim(), 2); + assert_eq!(d.is_scalar(), false); + + let d = file.new_attr::().shape(()).create("name2").unwrap(); + assert_eq!(d.shape(), vec![]); + assert_eq!(d.size(), 1); + assert_eq!(d.ndim(), 0); + assert_eq!(d.is_scalar(), true); + }) + } + + #[test] + pub fn test_get_file_attr_names() { + with_tmp_file(|file| { + let _ = file.new_attr::().shape((2, 3)).create("name1").unwrap(); + let _ = file.new_attr::().shape(()).create("name2").unwrap(); + + let attr_names = file.attr_names().unwrap(); + assert_eq!(attr_names.len(), 2); + assert!(attr_names.contains(&"name1".to_string())); + assert!(attr_names.contains(&"name2".to_string())); + }) + } + + #[test] + pub fn test_get_dataset_attr_names() { + with_tmp_file(|file| { + let ds = file.new_dataset::().shape((10, 10)).create("d1").unwrap(); + + let _ = ds.new_attr::().shape((2, 3)).create("name1").unwrap(); + let _ = ds.new_attr::().shape(()).create("name2").unwrap(); + + let attr_names = ds.attr_names().unwrap(); + assert_eq!(attr_names.len(), 2); + assert!(attr_names.contains(&"name1".to_string())); + assert!(attr_names.contains(&"name2".to_string())); + }) + } + + #[test] + pub fn test_datatype() { + with_tmp_file(|file| { + assert_eq!( + file.new_attr::().shape(1).create("name").unwrap().dtype().unwrap(), + Datatype::from_type::().unwrap() + ); + }) + } + + #[test] + pub fn test_read_write() { + with_tmp_file(|file| { + let arr = arr2(&[[1, 2, 3], [4, 5, 6]]); + + let attr = file.new_attr::().shape((2, 
3)).create("foo").unwrap(); + attr.as_writer().write(&arr).unwrap(); + + let read_attr = file.attr("foo").unwrap(); + assert_eq!(read_attr.shape(), vec![2, 3]); + + let arr_dyn: Array2<_> = read_attr.as_reader().read().unwrap(); + + assert_eq!(arr, arr_dyn.into_dimensionality().unwrap()); + }) + } + + #[test] + pub fn test_create() { + with_tmp_file(|file| { + let attr = file.new_attr::().shape((1, 2)).create("foo").unwrap(); + assert!(attr.is_valid()); + assert_eq!(attr.shape(), vec![1, 2]); + // FIXME - attr.name() returns "/" here, which is the name the attribute is connected to, + // not the name of the attribute. + //assert_eq!(attr.name(), "foo"); + assert_eq!(file.attr("foo").unwrap().shape(), vec![1, 2]); + }) + } + + #[test] + pub fn test_create_with_data() { + with_tmp_file(|file| { + let arr = arr2(&[[1, 2, 3], [4, 5, 6]]); + + let attr = file.new_attr_builder().with_data(&arr).create("foo").unwrap(); + assert!(attr.is_valid()); + assert_eq!(attr.shape(), vec![2, 3]); + // FIXME - attr.name() returns "/" here, which is the name the attribute is connected to, + // not the name of the attribute. 
+ //assert_eq!(attr.name(), "foo"); + assert_eq!(file.attr("foo").unwrap().shape(), vec![2, 3]); + + let read_attr = file.attr("foo").unwrap(); + assert_eq!(read_attr.shape(), vec![2, 3]); + let arr_dyn: Array2<_> = read_attr.as_reader().read().unwrap(); + assert_eq!(arr, arr_dyn.into_dimensionality().unwrap()); + }) + } + + #[test] + pub fn test_missing() { + with_tmp_file(|file| { + let _ = file.new_attr::().shape((1, 2)).create("foo").unwrap(); + let missing_result = file.attr("bar"); + assert!(missing_result.is_err()); + }) + } + + #[test] + pub fn test_write_read_str() { + with_tmp_file(|file| { + let s = VarLenUnicode::from_str("var len foo").unwrap(); + let attr = file.new_attr::().shape(()).create("foo").unwrap(); + attr.as_writer().write_scalar(&s).unwrap(); + let read_attr = file.attr("foo").unwrap(); + assert_eq!(read_attr.shape(), []); + let r: VarLenUnicode = read_attr.as_reader().read_scalar().unwrap(); + assert_eq!(r, s); + }) + } + + #[test] + pub fn test_list_names() { + with_tmp_file(|file| { + let arr1 = arr2(&[[123], [456]]); + let _attr1 = file.new_attr_builder().with_data(&arr1).create("foo").unwrap(); + let _attr2 = file.new_attr_builder().with_data("string").create("bar").unwrap(); + let attr_names = file.attr_names().unwrap(); + assert_eq!(attr_names.len(), 2); + assert!(attr_names.contains(&"foo".to_string())); + assert!(attr_names.contains(&"bar".to_string())); + }) + } +} diff --git a/src/hl/container.rs b/src/hl/container.rs index 613ed367f..ec0ffab77 100644 --- a/src/hl/container.rs +++ b/src/hl/container.rs @@ -1,9 +1,10 @@ +use std::convert::TryInto; use std::fmt::{self, Debug}; +use std::io; use std::mem; use std::ops::Deref; use ndarray::{Array, Array1, Array2, ArrayD, ArrayView, ArrayView1}; -use ndarray::{SliceInfo, SliceOrIndex}; use hdf5_sys::h5a::{H5Aget_space, H5Aget_storage_size, H5Aget_type, H5Aread, H5Awrite}; use hdf5_sys::h5d::{H5Dget_space, H5Dget_storage_size, H5Dget_type, H5Dread, H5Dwrite}; @@ -46,14 +47,16 @@ 
impl<'a> Reader<'a> { let (obj_id, tp_id) = (self.obj.id(), mem_dtype.id()); if self.obj.is_attr() { - h5try!(H5Aread(obj_id, tp_id, buf as *mut _)); + h5try!(H5Aread(obj_id, tp_id, buf.cast())); } else { let fspace_id = fspace.map_or(H5S_ALL, |f| f.id()); let mspace_id = mspace.map_or(H5S_ALL, |m| m.id()); let xfer = PropertyList::from_id(h5call!(H5Pcreate(*crate::globals::H5P_DATASET_XFER))?)?; - crate::hl::plist::set_vlen_manager_libc(xfer.id())?; - h5try!(H5Dread(obj_id, tp_id, mspace_id, fspace_id, xfer.id(), buf as *mut _)); + if !hdf5_types::USING_H5_ALLOCATOR { + crate::hl::plist::set_vlen_manager_libc(xfer.id())?; + } + h5try!(H5Dread(obj_id, tp_id, mspace_id, fspace_id, xfer.id(), buf.cast())); } Ok(()) } @@ -63,80 +66,47 @@ impl<'a> Reader<'a> { /// the slice, after singleton dimensions are dropped. /// Use the multi-dimensional slice macro `s![]` from `ndarray` to conveniently create /// a multidimensional slice. - pub fn read_slice(&self, slice: &SliceInfo) -> Result> + pub fn read_slice(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, D: ndarray::Dimension, { - ensure!(!self.obj.is_attr(), "slicing cannot be used on attribute datasets"); + ensure!(!self.obj.is_attr(), "Slicing cannot be used on attribute datasets"); - let shape = self.obj.get_shape()?; + let selection = selection.try_into()?; + let obj_space = self.obj.space()?; - let slice_s: &[SliceOrIndex] = slice.as_ref(); - let slice_dim = slice_s.len(); - if shape.ndim() != slice_dim { - let obj_ndim = shape.ndim(); + let out_shape = selection.out_shape(&obj_space.shape())?; + let out_size: Ix = out_shape.iter().product(); + let fspace = obj_space.select(selection)?; + + if let Some(ndim) = D::NDIM { + let out_ndim = out_shape.len(); + ensure!(ndim == out_ndim, "Selection ndim ({}) != array ndim ({})", out_ndim, ndim); + } else { + let fsize = fspace.selection_size(); ensure!( - obj_ndim == slice_dim, - "slice dimension mismatch: dataset 
has {} dims, slice has {} dims", - obj_ndim, - slice_dim + out_size == fsize, + "Selected size mismatch: {} != {} (shouldn't happen)", + out_size, + fsize ); } - if shape.ndim() == 0 { - // Check that return dimensionality is 0. - if let Some(ndim) = D::NDIM { - let obj_ndim = 0; - ensure!( - obj_ndim == ndim, - "ndim mismatch: slice outputs dims {}, output type dims {}", - obj_ndim, - ndim - ); - } - - // Fall back to a simple read for the scalar case - // Slicing has no effect + if out_size == 0 { + Ok(unsafe { Array::from_shape_vec_unchecked(out_shape, vec![]).into_dimensionality()? }) + } else if obj_space.ndim() == 0 { self.read() } else { - let fspace = self.obj.space()?; - let out_shape = fspace.select_slice(slice)?; - - // Remove dimensions from out_shape that were SliceOrIndex::Index in the slice - let reduced_shape: Vec<_> = slice_s - .iter() - .zip(out_shape.iter().cloned()) - .filter_map(|(slc, sz)| match slc { - SliceOrIndex::Index(_) => None, - _ => Some(sz), - }) - .collect(); - - // *Output* dimensionality must match the reduced shape, - // (i.e. dimensionality after singleton 'SliceOrIndex::Index' - // axes are dropped. - if let Some(ndim) = D::NDIM { - let obj_ndim = reduced_shape.len(); - ensure!( - obj_ndim == ndim, - "ndim mismatch: slice outputs dims {}, output type dims {}", - obj_ndim, - ndim - ); - } - - let mspace = Dataspace::try_new(&out_shape, false)?; - let size = out_shape.iter().product(); - let mut vec = Vec::with_capacity(size); - - self.read_into_buf(vec.as_mut_ptr(), Some(&fspace), Some(&mspace))?; + let mspace = Dataspace::try_new(&out_shape)?; + let mut buf = Vec::with_capacity(out_size); + self.read_into_buf(buf.as_mut_ptr(), Some(&fspace), Some(&mspace))?; unsafe { - vec.set_len(size); - } - - let arr = ArrayD::from_shape_vec(reduced_shape, vec)?; + buf.set_len(out_size); + }; + let arr = ArrayD::from_shape_vec(out_shape, buf)?; Ok(arr.into_dimensionality()?) 
} } @@ -177,12 +147,13 @@ impl<'a> Reader<'a> { /// Reads the given `slice` of the dataset into a 1-dimensional array. /// The slice must yield a 1-dimensional result. - pub fn read_slice_1d(&self, slice: &SliceInfo) -> Result> + pub fn read_slice_1d(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, { - self.read_slice(slice) + self.read_slice(selection) } /// Reads a dataset/attribute into a 2-dimensional array. @@ -194,12 +165,13 @@ impl<'a> Reader<'a> { /// Reads the given `slice` of the dataset into a 2-dimensional array. /// The slice must yield a 2-dimensional result. - pub fn read_slice_2d(&self, slice: &SliceInfo) -> Result> + pub fn read_slice_2d(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, { - self.read_slice(slice) + self.read_slice(selection) } /// Reads a dataset/attribute into an array with dynamic number of dimensions. @@ -251,11 +223,11 @@ impl<'a> Writer<'a> { let (obj_id, tp_id) = (self.obj.id(), mem_dtype.id()); if self.obj.is_attr() { - h5try!(H5Awrite(obj_id, tp_id, buf as *const _)); + h5try!(H5Awrite(obj_id, tp_id, buf.cast())); } else { let fspace_id = fspace.map_or(H5S_ALL, |f| f.id()); let mspace_id = mspace.map_or(H5S_ALL, |m| m.id()); - h5try!(H5Dwrite(obj_id, tp_id, mspace_id, fspace_id, H5P_DEFAULT, buf as *const _)); + h5try!(H5Dwrite(obj_id, tp_id, mspace_id, fspace_id, H5P_DEFAULT, buf.cast())); } Ok(()) } @@ -265,69 +237,55 @@ impl<'a> Writer<'a> { /// If the array has a fixed number of dimensions, it must match the dimensionality of /// dataset. Use the multi-dimensional slice macro `s![]` from `ndarray` to conveniently create /// a multidimensional slice. 
- pub fn write_slice<'b, A, T, S, D>(&self, arr: A, slice: &SliceInfo) -> Result<()> + pub fn write_slice<'b, A, T, S, D>(&self, arr: A, selection: S) -> Result<()> where A: Into>, T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, D: ndarray::Dimension, { - ensure!(!self.obj.is_attr(), "slicing cannot be used on attribute datasets"); + ensure!(!self.obj.is_attr(), "Slicing cannot be used on attribute datasets"); - let shape = self.obj.get_shape()?; - let slice_s: &[SliceOrIndex] = slice.as_ref(); - let slice_dim = slice_s.len(); - if shape.ndim() != slice_dim { - let obj_ndim = shape.ndim(); + let selection = selection.try_into()?; + let obj_space = self.obj.space()?; + + let out_shape = selection.out_shape(&obj_space.shape())?; + let out_size: Ix = out_shape.iter().product(); + let fspace = obj_space.select(selection)?; + let view = arr.into(); + + if let Some(ndim) = D::NDIM { + let out_ndim = out_shape.len(); + ensure!(ndim == out_ndim, "Selection ndim ({}) != array ndim ({})", out_ndim, ndim); + } else { + let fsize = fspace.selection_size(); ensure!( - obj_ndim == slice_dim, - "slice dimension mismatch: dataset has {} dims, slice has {} dims", - obj_ndim, - slice_dim + out_size == fsize, + "Selected size mismatch: {} != {} (shouldn't happen)", + out_size, + fsize + ); + ensure!( + view.shape() == out_shape.as_slice(), + "Shape mismatch: memory ({:?}) != destination ({:?})", + view.shape(), + out_shape ); } - if shape.ndim() == 0 { - // Fall back to a simple read for the scalar case - // Slicing has no effect - self.write(arr) + if out_size == 0 { + Ok(()) + } else if obj_space.ndim() == 0 { + self.write(view) } else { - let fspace = self.obj.space()?; - let slice_shape = fspace.select_slice(slice)?; - - let view = arr.into(); - let data_shape = view.shape(); - - // Restore dimensions that are SliceOrIndex::Index in the slice. 
- let mut data_shape_hydrated = Vec::new(); - let mut pos = 0; - for s in slice_s { - if let SliceOrIndex::Index(_) = s { - data_shape_hydrated.push(1); - } else { - data_shape_hydrated.push(data_shape[pos]); - pos += 1; - } - } - - let mspace = Dataspace::try_new(&slice_shape, false)?; - - // FIXME - we can handle non-standard input arrays by creating a memory space - // that reflects the same slicing/ordering that this ArrayView represents. - // we could also convert the array into a standard layout, but this is probably expensive. + let mspace = Dataspace::try_new(view.shape())?; + // TODO: support strided arrays (C-ordering we have to require regardless) ensure!( view.is_standard_layout(), - "input array is not in standard layout or is not contiguous" + "Input array is not in standard layout or non-contiguous" ); - if slice_shape != data_shape_hydrated { - fail!( - "shape mismatch when writing slice: memory = {:?}, destination = {:?}", - data_shape_hydrated, - slice_shape - ); - } - self.write_from_buf(view.as_ptr(), Some(&fspace), Some(&mspace)) } } @@ -389,8 +347,102 @@ impl<'a> Writer<'a> { } } +#[derive(Debug, Clone)] +pub struct ByteReader { + obj: Container, + pos: u64, + dt: Datatype, + obj_space: Dataspace, + xfer: PropertyList, +} + +impl ByteReader { + pub fn new(obj: &Container) -> Result { + ensure!(!obj.is_attr(), "ByteReader cannot be used on attribute datasets"); + + let obj = obj.clone(); + let file_dtype = obj.dtype()?; + let mem_dtype = Datatype::from_type::()?; + file_dtype.ensure_convertible(&mem_dtype, Conversion::NoOp)?; + + let obj_space = obj.space()?; + ensure!(obj_space.shape().len() == 1, "Only rank 1 datasets can be read via ByteReader"); + let xfer = PropertyList::from_id(h5call!(H5Pcreate(*crate::globals::H5P_DATASET_XFER))?)?; + if !hdf5_types::USING_H5_ALLOCATOR { + crate::hl::plist::set_vlen_manager_libc(xfer.id())?; + } + Ok(ByteReader { obj, pos: 0, obj_space, dt: mem_dtype, xfer }) + } + + fn dataset_len(&self) -> usize { + 
self.obj_space.shape()[0] + } + + fn remaining_len(&self) -> usize { + self.dataset_len().saturating_sub(self.pos as usize) + } + + pub fn is_empty(&self) -> bool { + self.pos >= self.dataset_len() as u64 + } +} + +impl io::Read for ByteReader { + fn read(&mut self, buf: &mut [u8]) -> io::Result { + let pos = self.pos as usize; + let amt = std::cmp::min(buf.len(), self.remaining_len()); + let selection = Selection::new(pos..pos + amt); + let out_shape = selection.out_shape(&self.obj_space.shape())?; + let fspace = self.obj_space.select(selection)?; + let mspace = Dataspace::try_new(&out_shape)?; + h5call!(H5Dread( + self.obj.id(), + self.dt.id(), + mspace.id(), + fspace.id(), + self.xfer.id(), + buf.as_mut_ptr().cast() + ))?; + self.pos += amt as u64; + Ok(out_shape[0]) + } +} + +impl io::Seek for ByteReader { + fn seek(&mut self, style: io::SeekFrom) -> io::Result { + let (base_pos, offset) = match style { + io::SeekFrom::Start(n) => { + self.pos = n; + return Ok(n); + } + io::SeekFrom::End(n) => (self.dataset_len() as u64, n), + io::SeekFrom::Current(n) => (self.pos, n), + }; + let new_pos = if offset.is_negative() { + base_pos.checked_sub(offset.wrapping_abs() as u64) + } else { + base_pos.checked_add(offset as u64) + }; + match new_pos { + Some(n) => { + self.pos = n; + Ok(self.pos) + } + None => Err(io::Error::new( + io::ErrorKind::InvalidInput, + "invalid seek to a negative or overflowing position", + )), + } + } + + fn stream_position(&mut self) -> io::Result { + Ok(self.pos) + } +} + #[repr(transparent)] #[derive(Clone)] +/// An object which can be read or written to. 
pub struct Container(Handle); impl ObjectClass for Container { @@ -422,7 +474,7 @@ impl Deref for Container { impl Container { pub(crate) fn is_attr(&self) -> bool { - get_id_type(self.id()) == H5I_ATTR + self.handle().id_type() == H5I_ATTR } /// Creates a reader wrapper for this dataset/attribute, allowing to @@ -437,6 +489,14 @@ impl Container { Writer::new(self) } + /// Creates `ByteReader` which implements [`Read`](std::io::Read) + /// and [`Seek`](std::io::Seek). + /// + /// ``ByteReader`` only supports 1-D `u8` datasets. + pub fn as_byte_reader(&self) -> Result { + ByteReader::new(&self) + } + /// Returns the datatype of the dataset/attribute. pub fn dtype(&self) -> Result { if self.is_attr() { @@ -457,12 +517,12 @@ impl Container { #[doc(hidden)] pub fn get_shape(&self) -> Result> { - self.space().map(|s| s.dims()) + self.space().map(|s| s.shape()) } /// Returns the shape of the dataset/attribute. pub fn shape(&self) -> Vec { - self.space().ok().map_or_else(Vec::new, |s| s.dims()) + self.space().ok().map_or_else(Vec::new, |s| s.shape()) } /// Returns the number of dimensions in the dataset/attribute. @@ -472,12 +532,12 @@ impl Container { /// Returns the total number of elements in the dataset/attribute. pub fn size(&self) -> usize { - self.shape().size() + self.shape().iter().product() } /// Returns whether this dataset/attribute is a scalar. pub fn is_scalar(&self) -> bool { - self.ndim() == 0 + self.space().ok().map_or(false, |s| s.is_scalar()) } /// Returns the amount of file space required for the dataset/attribute. Note that this @@ -512,12 +572,13 @@ impl Container { /// Reads the given `slice` of the dataset into a 1-dimensional array. /// The slice must yield a 1-dimensional result. 
- pub fn read_slice_1d(&self, slice: &SliceInfo) -> Result> + pub fn read_slice_1d(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, { - self.as_reader().read_slice_1d(slice) + self.as_reader().read_slice_1d(selection) } /// Reads a dataset/attribute into a 2-dimensional array. @@ -529,12 +590,13 @@ impl Container { /// Reads the given `slice` of the dataset into a 2-dimensional array. /// The slice must yield a 2-dimensional result. - pub fn read_slice_2d(&self, slice: &SliceInfo) -> Result> + pub fn read_slice_2d(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, { - self.as_reader().read_slice_2d(slice) + self.as_reader().read_slice_2d(selection) } /// Reads a dataset/attribute into an array with dynamic number of dimensions. @@ -547,13 +609,14 @@ impl Container { /// the slice, after singleton dimensions are dropped. /// Use the multi-dimensional slice macro `s![]` from `ndarray` to conveniently create /// a multidimensional slice. - pub fn read_slice(&self, slice: &SliceInfo) -> Result> + pub fn read_slice(&self, selection: S) -> Result> where T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, D: ndarray::Dimension, { - self.as_reader().read_slice(slice) + self.as_reader().read_slice(selection) } /// Reads a scalar dataset/attribute. @@ -592,14 +655,15 @@ impl Container { /// If the array has a fixed number of dimensions, it must match the dimensionality of /// dataset. Use the multi-dimensional slice macro `s![]` from `ndarray` to conveniently create /// a multidimensional slice. 
- pub fn write_slice<'b, A, T, S, D>(&self, arr: A, slice: &SliceInfo) -> Result<()> + pub fn write_slice<'b, A, T, S, D>(&self, arr: A, selection: S) -> Result<()> where A: Into>, T: H5Type, - S: AsRef<[SliceOrIndex]>, + S: TryInto, + Error: From, D: ndarray::Dimension, { - self.as_writer().write_slice(arr, slice) + self.as_writer().write_slice(arr, selection) } /// Writes a scalar dataset/attribute. diff --git a/src/hl/dataset.rs b/src/hl/dataset.rs index a04808316..cf5dc9795 100644 --- a/src/hl/dataset.rs +++ b/src/hl/dataset.rs @@ -1,27 +1,37 @@ use std::fmt::{self, Debug}; -use std::mem; use std::ops::Deref; -use num_integer::div_floor; +use ndarray::{self, ArrayView}; -#[cfg(hdf5_1_10_5)] +use hdf5_sys::h5::HADDR_UNDEF; +use hdf5_sys::h5d::{ + H5Dcreate2, H5Dcreate_anon, H5Dget_access_plist, H5Dget_create_plist, H5Dget_offset, + H5Dset_extent, +}; +#[cfg(feature = "1.10.5")] use hdf5_sys::h5d::{H5Dget_chunk_info, H5Dget_num_chunks}; -use hdf5_sys::{ - h5::HADDR_UNDEF, - h5d::{ - H5D_fill_value_t, H5D_layout_t, H5Dcreate2, H5Dcreate_anon, H5Dget_create_plist, - H5Dget_offset, H5Dset_extent, H5D_FILL_TIME_ALLOC, - }, - h5p::{ - H5Pcreate, H5Pfill_value_defined, H5Pget_chunk, H5Pget_fill_value, H5Pget_layout, - H5Pget_obj_track_times, H5Pset_chunk, H5Pset_create_intermediate_group, H5Pset_fill_time, - H5Pset_fill_value, H5Pset_obj_track_times, - }, +use hdf5_sys::h5l::H5Ldelete; +use hdf5_sys::h5p::H5P_DEFAULT; +use hdf5_sys::h5z::H5Z_filter_t; +use hdf5_types::{OwnedDynValue, TypeDescriptor}; + +#[cfg(feature = "blosc")] +use crate::hl::filters::{Blosc, BloscShuffle}; +use crate::hl::filters::{Filter, SZip, ScaleOffset}; +#[cfg(feature = "1.10.0")] +use crate::hl::plist::dataset_access::VirtualView; +use crate::hl::plist::dataset_access::{DatasetAccess, DatasetAccessBuilder}; +#[cfg(feature = "1.10.0")] +use crate::hl::plist::dataset_create::ChunkOpts; +use crate::hl::plist::dataset_create::{ + AllocTime, AttrCreationOrder, DatasetCreate, DatasetCreateBuilder, 
FillTime, Layout, }; - -use crate::globals::H5P_LINK_CREATE; +use crate::hl::plist::link_create::{CharEncoding, LinkCreate, LinkCreateBuilder}; use crate::internal_prelude::*; +/// Default chunk size when filters are enabled and the chunk size is not specified. +pub const DEFAULT_CHUNK_SIZE_KB: usize = 64 * 1024; + /// Represents the HDF5 dataset object. #[repr(transparent)] #[derive(Clone)] @@ -56,15 +66,7 @@ impl Deref for Dataset { } } -#[derive(Clone, Debug, PartialEq, Eq)] -pub enum Chunk { - None, - Auto, - Infer, - Manual(Vec), -} - -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] #[derive(Clone, Debug, PartialEq, Eq)] pub struct ChunkInfo { /// Array with a size equal to the dataset’s rank whose elements contain 0-based @@ -81,32 +83,57 @@ pub struct ChunkInfo { pub size: u64, } -#[cfg(hdf5_1_10_5)] +#[cfg(feature = "1.10.5")] impl ChunkInfo { pub(crate) fn new(ndim: usize) -> Self { - let mut offset = Vec::with_capacity(ndim); - unsafe { offset.set_len(ndim) }; + let offset = vec![0; ndim]; Self { offset, filter_mask: 0, addr: 0, size: 0 } } + + /// Returns positional indices of disabled filters. + pub fn disabled_filters(&self) -> Vec { + (0..32).filter(|i| self.filter_mask & (1 << i) != 0).collect() + } } impl Dataset { - /// Returns whether this dataset is resizable along some axis. + /// Returns a copy of the dataset access property list. + pub fn access_plist(&self) -> Result { + h5lock!(DatasetAccess::from_id(h5try!(H5Dget_access_plist(self.id())))) + } + + /// A short alias for `access_plist()`. + pub fn dapl(&self) -> Result { + self.access_plist() + } + + /// Returns a copy of the dataset creation property list. + pub fn create_plist(&self) -> Result { + h5lock!(DatasetCreate::from_id(h5try!(H5Dget_create_plist(self.id())))) + } + + /// A short alias for `create_plist()`. + pub fn dcpl(&self) -> Result { + self.create_plist() + } + + /// Returns `true` if this dataset is resizable along at least one axis. 
pub fn is_resizable(&self) -> bool { - h5lock!(self.space().ok().map_or(false, |s| s.resizable())) + h5lock!(self.space().ok().map_or(false, |s| s.is_resizable())) } - /// Returns whether this dataset has a chunked layout. + /// Returns `true` if this dataset has a chunked layout. pub fn is_chunked(&self) -> bool { - h5lock!({ - self.dcpl_id() - .ok() - .map_or(false, |dcpl_id| H5Pget_layout(dcpl_id) == H5D_layout_t::H5D_CHUNKED) - }) + self.layout() == Layout::Chunked } - #[cfg(hdf5_1_10_5)] - /// Returns number of chunks if the dataset is chunked. + /// Returns the dataset layout. + pub fn layout(&self) -> Layout { + self.dcpl().map_or(Layout::default(), |pl| pl.layout()) + } + + #[cfg(feature = "1.10.5")] + /// Returns the number of chunks if the dataset is chunked. pub fn num_chunks(&self) -> Option { if !self.is_chunked() { return None; @@ -117,7 +144,7 @@ impl Dataset { })) } - #[cfg(hdf5_1_10_5)] + #[cfg(feature = "1.10.5")] /// Retrieves the chunk information for the chunk specified by its index. pub fn chunk_info(&self, index: usize) -> Option { if !self.is_chunked() { @@ -140,728 +167,948 @@ impl Dataset { } /// Returns the chunk shape if the dataset is chunked. - pub fn chunks(&self) -> Option> { - h5lock!({ - self.dcpl_id().ok().and_then(|dcpl_id| { - if self.is_chunked() { - Some({ - let ndim = self.ndim(); - let mut dims: Vec = Vec::with_capacity(ndim); - dims.set_len(ndim); - H5Pget_chunk(dcpl_id, ndim as _, dims.as_mut_ptr()); - dims.iter().map(|&x| x as _).collect() - }) - } else { - None - } - }) - }) + pub fn chunk(&self) -> Option> { + self.dcpl().map_or(None, |pl| pl.chunk()) } - /// Returns the filters used to create the dataset. - pub fn filters(&self) -> Filters { - h5lock!({ - let dcpl = PropertyList::from_id(H5Dget_create_plist(self.id()))?; - Ok(Filters::from_dcpl(&dcpl)?) 
- }) - .unwrap_or_else(|_: crate::error::Error| Filters::default()) + /// Returns the absolute byte offset of the dataset in the file if such offset is defined + /// (which is not the case for datasets that are chunked, compact or not allocated yet). + pub fn offset(&self) -> Option { + match h5lock!(H5Dget_offset(self.id())) as haddr_t { + HADDR_UNDEF => None, + offset => Some(offset as _), + } } - /// Returns `true` if object modification time is tracked by the dataset. - pub fn tracks_times(&self) -> bool { - h5lock!({ - self.dcpl_id().ok().map_or(false, |dcpl_id| { - let mut track_times: hbool_t = 0; - h5lock!(H5Pget_obj_track_times(dcpl_id, &mut track_times as *mut _)); - track_times > 0 - }) - }) + /// Returns default fill value for the dataset if such value is set. + pub fn fill_value(&self) -> Result> { + h5lock!(self.dcpl()?.get_fill_value(&self.dtype()?.to_descriptor()?)) } - /// Returns the absolute byte offset of the dataset in the file if such offset is defined - /// (which is not the case for datasets that are chunked, compact or not allocated yet). - pub fn offset(&self) -> Option { - let offset: haddr_t = h5lock!(H5Dget_offset(self.id())); - if offset == HADDR_UNDEF { - None - } else { - Some(offset as _) + /// Resizes the dataset to a new shape. + pub fn resize(&self, shape: D) -> Result<()> { + let mut dims: Vec = vec![]; + for dim in &shape.dims() { + dims.push(*dim as _); + } + h5try!(H5Dset_extent(self.id(), dims.as_ptr())); + Ok(()) + } + + /// Returns the pipeline of filters used in this dataset. 
+ pub fn filters(&self) -> Vec { + self.dcpl().map_or(Vec::default(), |pl| pl.filters()) + } +} + +pub struct Maybe(Option); + +impl Deref for Maybe { + type Target = Option; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl From> for Option { + fn from(v: Maybe) -> Self { + v.0 + } +} + +impl From for Maybe { + fn from(v: T) -> Self { + Self(Some(v)) + } +} + +impl From> for Maybe { + fn from(v: Option) -> Self { + Self(v) + } +} + +#[derive(Clone)] +/// A dataset builder +pub struct DatasetBuilder { + builder: DatasetBuilderInner, +} + +impl DatasetBuilder { + pub fn new(parent: &Group) -> Self { + Self { builder: DatasetBuilderInner::new(parent) } + } + + pub fn empty(self) -> DatasetBuilderEmpty { + self.empty_as(&T::type_descriptor()) + } + + pub fn empty_as(self, type_desc: &TypeDescriptor) -> DatasetBuilderEmpty { + DatasetBuilderEmpty { builder: self.builder, type_desc: type_desc.clone() } + } + + pub fn with_data<'d, A, T, D>(self, data: A) -> DatasetBuilderData<'d, T, D> + where + A: Into>, + T: H5Type, + D: ndarray::Dimension, + { + self.with_data_as::(data, &T::type_descriptor()) + } + + pub fn with_data_as<'d, A, T, D>( + self, data: A, type_desc: &TypeDescriptor, + ) -> DatasetBuilderData<'d, T, D> + where + A: Into>, + T: H5Type, + D: ndarray::Dimension, + { + DatasetBuilderData { + builder: self.builder, + data: data.into(), + type_desc: type_desc.clone(), + conv: Conversion::Soft, + } + } +} + +#[derive(Clone)] +/// A dataset builder with the type known +pub struct DatasetBuilderEmpty { + builder: DatasetBuilderInner, + type_desc: TypeDescriptor, +} + +impl DatasetBuilderEmpty { + pub fn shape>(self, extents: S) -> DatasetBuilderEmptyShape { + DatasetBuilderEmptyShape { + builder: self.builder, + type_desc: self.type_desc, + extents: extents.into(), } } + pub fn create<'n, T: Into>>(self, name: T) -> Result { + self.shape(()).create(name) + } +} - /// Returns default fill value for the dataset if such value is set. 
Note that conversion - /// to the requested type is done by HDF5 which may result in loss of precision for - /// floating-point values if the datatype differs from the datatype of of the dataset. - pub fn fill_value(&self) -> Result> { +#[derive(Clone)] +/// A dataset builder with type and shape known +pub struct DatasetBuilderEmptyShape { + builder: DatasetBuilderInner, + type_desc: TypeDescriptor, + extents: Extents, +} + +impl DatasetBuilderEmptyShape { + pub fn create<'n, T: Into>>(&self, name: T) -> Result { + h5lock!(self.builder.create(&self.type_desc, name.into().into(), &self.extents)) + } +} + +#[derive(Clone)] +/// A dataset builder with type, shape, and data known +pub struct DatasetBuilderData<'d, T, D> { + builder: DatasetBuilderInner, + data: ArrayView<'d, T, D>, + type_desc: TypeDescriptor, + conv: Conversion, +} + +impl<'d, T, D> DatasetBuilderData<'d, T, D> +where + T: H5Type, + D: ndarray::Dimension, +{ + /// Set maximum allowed conversion level. + pub fn conversion(mut self, conv: Conversion) -> Self { + self.conv = conv; + self + } + + /// Disallow all conversions. 
+ pub fn no_convert(mut self) -> Self { + self.conv = Conversion::NoOp; + self + } + + pub fn create<'n, N: Into>>(&self, name: N) -> Result { + ensure!( + self.data.is_standard_layout(), + "input array is not in standard layout or is not contiguous" + ); // TODO: relax this when it's supported in the writer + let extents = Extents::from(self.data.shape()); + let name = name.into().into(); h5lock!({ - let defined: *mut H5D_fill_value_t = &mut H5D_fill_value_t::H5D_FILL_VALUE_UNDEFINED; - let dcpl_id = self.dcpl_id()?; - h5try!(H5Pfill_value_defined(dcpl_id, defined)); - match *defined { - H5D_fill_value_t::H5D_FILL_VALUE_ERROR => fail!("Invalid fill value"), - H5D_fill_value_t::H5D_FILL_VALUE_UNDEFINED => Ok(None), - _ => { - let datatype = Datatype::from_type::()?; - let mut value = mem::MaybeUninit::::uninit(); - h5try!( - H5Pget_fill_value(dcpl_id, datatype.id(), value.as_mut_ptr() as *mut _,) - ); - Ok(Some(value.assume_init())) - } + let dtype_src = Datatype::from_type::()?; + let dtype_dst = Datatype::from_descriptor(&self.type_desc)?; + dtype_src.ensure_convertible(&dtype_dst, self.conv)?; + let ds = self.builder.create(&self.type_desc, name, &extents)?; + if let Err(err) = ds.write(self.data.view()) { + self.builder.try_unlink(name); + Err(err) + } else { + Ok(ds) } }) } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Chunk { + Exact(Vec), // exact chunk shape + MinKB(usize), // minimum chunk shape in KB + None, // leave it unchunked +} - fn dcpl_id(&self) -> Result { - h5call!(H5Dget_create_plist(self.id())) +impl Default for Chunk { + fn default() -> Self { + Self::None } +} - pub fn resize(&self, d: D) -> Result<()> { - let mut dims: Vec = vec![]; - for dim in &d.dims() { - dims.push(*dim as _); +fn compute_chunk_shape(dims: &SimpleExtents, minimum_elements: usize) -> Vec { + let mut chunk_shape = vec![1; dims.ndim()]; + let mut product_cs = 1; + + // For c-order datasets we iterate from the back (fastest iteration order) + for (extent, cs) in 
dims.iter().zip(chunk_shape.iter_mut()).rev() { + if product_cs >= minimum_elements { + break; } - h5try!(H5Dset_extent(self.id(), dims.as_ptr())); - Ok(()) + let wanted_size = minimum_elements / product_cs; + // If unlimited dimension we just map to wanted_size + *cs = extent.max.map_or(wanted_size, |maxdim| { + // If the requested chunk size would result + // in dividing the chunk in two uneven parts, + // we instead merge these into the same chunk + // to prevent having small chunks + if 2 * wanted_size > maxdim + 1 { + maxdim + } else { + std::cmp::min(wanted_size, maxdim) + } + }); + + product_cs *= *cs; } + chunk_shape } #[derive(Clone)] -pub struct DatasetBuilder { - packed: bool, - filters: Filters, - chunk: Chunk, +/// The true internal dataset builder +struct DatasetBuilderInner { parent: Result, - track_times: bool, - resizable: bool, - fill_value: Option, + dapl_base: Option, + dcpl_base: Option, + lcpl_base: Option, + dapl_builder: DatasetAccessBuilder, + dcpl_builder: DatasetCreateBuilder, + lcpl_builder: LinkCreateBuilder, + packed: bool, + chunk: Option, } -impl DatasetBuilder { - /// Create a new dataset builder and bind it to the parent container. +impl DatasetBuilderInner { pub fn new(parent: &Group) -> Self { - h5lock!({ - // Store the reference to the parent handle and try to increase its reference count. 
- let handle = Handle::try_new(parent.id()); - if let Ok(ref handle) = handle { - handle.incref(); + // same as in h5py, disable time tracking by default and enable intermediate groups + let mut dcpl = DatasetCreateBuilder::default(); + dcpl.obj_track_times(false); + let mut lcpl = LinkCreateBuilder::default(); + lcpl.create_intermediate_group(true); + + Self { + parent: parent.try_borrow(), + dapl_base: None, + dcpl_base: None, + lcpl_base: None, + dapl_builder: DatasetAccessBuilder::default(), + dcpl_builder: dcpl, + lcpl_builder: lcpl, + packed: false, + chunk: None, + } + } + + pub fn packed(&mut self, packed: bool) { + self.packed = packed; + } + + fn build_dapl(&self) -> Result { + let mut dapl = match &self.dapl_base { + Some(dapl) => dapl.clone(), + None => DatasetAccess::try_new()?, + }; + self.dapl_builder.apply(&mut dapl).map(|_| dapl) + } + + fn compute_chunk_shape(&self, dtype: &Datatype, extents: &Extents) -> Result>> { + let extents = if let Extents::Simple(extents) = extents { + extents + } else { + return Ok(None); + }; + let has_filters = self.dcpl_builder.has_filters() + || self.dcpl_base.as_ref().map_or(false, DatasetCreate::has_filters); + let chunking_required = has_filters || extents.is_resizable(); + let chunking_allowed = extents.size() > 0 || extents.is_resizable(); + + let chunk = if let Some(chunk) = &self.chunk { + chunk.clone() + } else if chunking_required && chunking_allowed { + Chunk::MinKB(DEFAULT_CHUNK_SIZE_KB) + } else { + Chunk::None + }; + + let chunk_shape = match chunk { + Chunk::Exact(chunk) => Some(chunk), + Chunk::MinKB(size) => { + let min_elements = size / dtype.size() * 1024; + Some(compute_chunk_shape(extents, min_elements)) + } + Chunk::None => { + ensure!(!extents.is_resizable(), "Chunking required for resizable datasets"); + ensure!(!has_filters, "Chunking required when filters are present"); + None } + }; + if let Some(ref chunk) = chunk_shape { + let ndim = extents.ndim(); + ensure!(ndim != 0, "Chunking cannot be 
enabled for 0-dim datasets"); + ensure!(ndim == chunk.len(), "Expected chunk ndim {}, got {}", ndim, chunk.len()); + let chunk_size = chunk.iter().product::(); + ensure!(chunk_size > 0, "All chunk dimensions must be positive, got {:?}", chunk); + let dims_ok = extents.iter().zip(chunk).all(|(e, c)| e.max.is_none() || *c <= e.dim); + ensure!(dims_ok, "Chunk dimensions ({:?}) exceed data shape ({:?})", chunk, extents); + } + Ok(chunk_shape) + } + + fn build_dcpl(&self, dtype: &Datatype, extents: &Extents) -> Result { + self.dcpl_builder.validate_filters(dtype.id())?; - Self { - packed: false, - filters: Filters::default(), - chunk: Chunk::Auto, - parent: handle, - track_times: false, - resizable: false, - fill_value: None, + let mut dcpl_builder = self.dcpl_builder.clone(); + if let Some(chunk) = self.compute_chunk_shape(dtype, extents)? { + dcpl_builder.chunk(chunk); + if !dcpl_builder.has_fill_time() { + // prevent resize glitch (borrowed from h5py) + dcpl_builder.fill_time(FillTime::Alloc); } - }) + } else { + dcpl_builder.no_chunk(); + } + + let mut dcpl = match &self.dcpl_base { + Some(dcpl) => dcpl.clone(), + None => DatasetCreate::try_new()?, + }; + dcpl_builder.apply(&mut dcpl).map(|_| dcpl) } - pub fn packed(&mut self, packed: bool) -> &mut Self { - self.packed = packed; - self + fn build_lcpl(&self) -> Result { + let mut lcpl = match &self.lcpl_base { + Some(lcpl) => lcpl.clone(), + None => LinkCreate::try_new()?, + }; + self.lcpl_builder.apply(&mut lcpl).map(|_| lcpl) } - pub fn fill_value(&mut self, fill_value: T) -> &mut Self { - self.fill_value = Some(fill_value); - self + fn try_unlink<'n, N: Into>>(&self, name: N) { + if let Some(name) = name.into() { + let name = to_cstring(name).unwrap(); + if let Ok(parent) = &self.parent { + h5lock!(H5Ldelete(parent.id(), name.as_ptr(), H5P_DEFAULT)); + } + } } - /// Disable chunking. 
- pub fn no_chunk(&mut self) -> &mut Self { - self.chunk = Chunk::None; - self + unsafe fn create( + &self, desc: &TypeDescriptor, name: Option<&str>, extents: &Extents, + ) -> Result { + // construct in-file type descriptor; convert to packed representation if needed + let desc = if self.packed { desc.to_packed_repr() } else { desc.to_c_repr() }; + let dtype = Datatype::from_descriptor(&desc)?; + + // construct DAPL and DCPL, validate filters + let dapl = self.build_dapl()?; + let dcpl = self.build_dcpl(&dtype, extents)?; + + // create the dataspace from extents + let space = Dataspace::try_new(extents)?; + + // extract all ids and create the dataset + let parent = try_ref_clone!(self.parent); + let (pid, dtype_id, space_id, dcpl_id, dapl_id) = + (parent.id(), dtype.id(), space.id(), dcpl.id(), dapl.id()); + let ds_id = if let Some(name) = name { + // create named dataset + let lcpl = self.build_lcpl()?; + let name = to_cstring(name)?; + H5Dcreate2(pid, name.as_ptr(), dtype_id, space_id, lcpl.id(), dcpl_id, dapl_id) + } else { + // create anonymous dataset + H5Dcreate_anon(pid, dtype_id, space_id, dcpl_id, dapl_id) + }; + Dataset::from_id(h5check(ds_id)?) } - /// Enable automatic chunking only if chunking is required (default option). - pub fn chunk_auto(&mut self) -> &mut Self { - self.chunk = Chunk::Auto; - self + //////////////////// + // DatasetAccess // + //////////////////// + + pub fn set_access_plist(&mut self, dapl: &DatasetAccess) { + self.dapl_base = Some(dapl.clone()); } - /// Enable chunking with automatic chunk shape. - pub fn chunk_infer(&mut self) -> &mut Self { - self.chunk = Chunk::Infer; - self + pub fn set_dapl(&mut self, dapl: &DatasetAccess) { + self.set_access_plist(dapl); } - /// Set chunk shape manually. - pub fn chunk(&mut self, chunk: D) -> &mut Self { - self.chunk = Chunk::Manual(chunk.dims()); - self + pub fn access_plist(&mut self) -> &mut DatasetAccessBuilder { + &mut self.dapl_builder } - /// Set the filters. 
- pub fn filters(&mut self, filters: &Filters) -> &mut Self { - self.filters = filters.clone(); - self + pub fn dapl(&mut self) -> &mut DatasetAccessBuilder { + self.access_plist() } - /// Enable or disable tracking object modification time (disabled by default). - pub fn track_times(&mut self, track_times: bool) -> &mut Self { - self.track_times = track_times; - self + pub fn with_access_plist(&mut self, func: F) + where + F: Fn(&mut DatasetAccessBuilder) -> &mut DatasetAccessBuilder, + { + func(&mut self.dapl_builder); } - /// Make the dataset resizable along all axes (requires chunking). - pub fn resizable(&mut self, resizable: bool) -> &mut Self { - self.resizable = resizable; - self + pub fn with_dapl(&mut self, func: F) + where + F: Fn(&mut DatasetAccessBuilder) -> &mut DatasetAccessBuilder, + { + self.with_access_plist(func); } - /// Enable gzip compression with a specified level (0-9). - pub fn gzip(&mut self, level: u8) -> &mut Self { - self.filters.gzip(level); - self + // DAPL properties + + pub fn chunk_cache(&mut self, nslots: usize, nbytes: usize, w0: f64) { + self.with_dapl(|pl| pl.chunk_cache(nslots, nbytes, w0)); } - /// Enable szip compression with a specified method (EC, NN) and level (0-32). - /// - /// If `nn` if set to `true` (default), the nearest neighbor method is used, otherwise - /// the method is set to entropy coding. - pub fn szip(&mut self, nn: bool, level: u8) -> &mut Self { - self.filters.szip(nn, level); - self + #[cfg(feature = "1.8.17")] + pub fn efile_prefix(&mut self, prefix: &str) { + self.with_dapl(|pl| pl.efile_prefix(prefix)); } - /// Enable or disable shuffle filter. - pub fn shuffle(&mut self, shuffle: bool) -> &mut Self { - self.filters.shuffle(shuffle); - self + #[cfg(feature = "1.10.0")] + pub fn virtual_view(&mut self, view: VirtualView) { + self.with_dapl(|pl| pl.virtual_view(view)); } - /// Enable or disable fletcher32 filter. 
- pub fn fletcher32(&mut self, fletcher32: bool) -> &mut Self { - self.filters.fletcher32(fletcher32); - self + #[cfg(feature = "1.10.0")] + pub fn virtual_printf_gap(&mut self, gap_size: usize) { + self.with_dapl(|pl| pl.virtual_printf_gap(gap_size)); } - /// Enable scale-offset filter with a specified factor (0 means automatic). - pub fn scale_offset(&mut self, scale_offset: u32) -> &mut Self { - self.filters.scale_offset(scale_offset); - self + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] + pub fn all_coll_metadata_ops(&mut self, is_collective: bool) { + self.with_dapl(|pl| pl.all_coll_metadata_ops(is_collective)); } - fn make_dcpl(&self, datatype: &Datatype, shape: D) -> Result { - h5lock!({ - let dcpl = self.filters.to_dcpl(datatype)?; - let id = dcpl.id(); + //////////////////// + // DatasetCreate // + //////////////////// - h5try!(H5Pset_obj_track_times(id, self.track_times as _)); + pub fn set_create_plist(&mut self, dcpl: &DatasetCreate) { + self.dcpl_base = Some(dcpl.clone()); + } - if let Some(ref fill_value) = self.fill_value { - h5try!(H5Pset_fill_value(id, datatype.id(), fill_value as *const _ as *const _)); - } + pub fn set_dcpl(&mut self, dcpl: &DatasetCreate) { + self.set_create_plist(dcpl); + } - if let Chunk::None = self.chunk { - ensure!( - !self.filters.has_filters(), - "Chunking must be enabled when filters are present" - ); - ensure!(!self.resizable, "Chunking must be enabled for resizable datasets"); - } else { - let no_chunk = if let Chunk::Auto = self.chunk { - !self.filters.has_filters() && !self.resizable - } else { - false - }; - if !no_chunk { - ensure!(shape.ndim() > 0, "Chunking cannot be enabled for scalar datasets"); - - let dims = match self.chunk { - Chunk::Manual(ref c) => c.clone(), - _ => infer_chunk_size(&shape, datatype.size()), - }; - - ensure!( - dims.ndim() == shape.ndim(), - "Invalid chunk ndim: expected {}, got {}", - shape.ndim(), - dims.ndim() - ); - ensure!( - dims.size() > 0, - "Invalid chunk: {:?} 
(all dimensions must be positive)", - dims - ); - - if !self.resizable { - ensure!( - dims.iter().zip(shape.dims().iter()).all(|(&c, &s)| c <= s), - "Invalid chunk: {:?} (must not exceed data shape in any dimension)", - dims - ); - } - - let c_dims: Vec = dims.iter().map(|&x| x as _).collect(); - h5try!(H5Pset_chunk(id, dims.ndim() as _, c_dims.as_ptr())); - - // For chunked datasets, write fill values at the allocation time. - h5try!(H5Pset_fill_time(id, H5D_FILL_TIME_ALLOC)); - } - } + pub fn create_plist(&mut self) -> &mut DatasetCreateBuilder { + &mut self.dcpl_builder + } - Ok(dcpl) - }) + pub fn dcpl(&mut self) -> &mut DatasetCreateBuilder { + self.create_plist() } - fn make_lcpl() -> Result { - h5lock!({ - let lcpl = PropertyList::from_id(h5try!(H5Pcreate(*H5P_LINK_CREATE)))?; - h5call!(H5Pset_create_intermediate_group(lcpl.id(), 1)).and(Ok(lcpl)) - }) + pub fn with_create_plist(&mut self, func: F) + where + F: Fn(&mut DatasetCreateBuilder) -> &mut DatasetCreateBuilder, + { + func(&mut self.dcpl_builder); } - fn finalize(&self, name: Option<&str>, shape: D) -> Result { - let type_descriptor = if self.packed { - ::type_descriptor().to_packed_repr() - } else { - ::type_descriptor().to_c_repr() - }; - h5lock!({ - let datatype = Datatype::from_descriptor(&type_descriptor)?; - let parent = try_ref_clone!(self.parent); - - let dataspace = Dataspace::try_new(&shape, self.resizable)?; - let dcpl = self.make_dcpl(&datatype, &shape)?; - - if let Some(name) = name { - let lcpl = Self::make_lcpl()?; - let name = to_cstring(name)?; - Dataset::from_id(h5try!(H5Dcreate2( - parent.id(), - name.as_ptr(), - datatype.id(), - dataspace.id(), - lcpl.id(), - dcpl.id(), - H5P_DEFAULT - ))) - } else { - Dataset::from_id(h5try!(H5Dcreate_anon( - parent.id(), - datatype.id(), - dataspace.id(), - dcpl.id(), - H5P_DEFAULT - ))) - } - }) + pub fn with_dcpl(&mut self, func: F) + where + F: Fn(&mut DatasetCreateBuilder) -> &mut DatasetCreateBuilder, + { + self.with_create_plist(func); } - 
/// Create the dataset and link it into the file structure. - pub fn create(&self, name: &str, shape: D) -> Result { - self.finalize(Some(name), shape) + // DCPL properties + + pub fn set_filters(&mut self, filters: &[Filter]) { + self.with_dcpl(|pl| pl.set_filters(filters)); } - /// Create an anonymous dataset without linking it. - pub fn create_anon(&self, shape: D) -> Result { - self.finalize(None, shape) + pub fn deflate(&mut self, level: u8) { + self.with_dcpl(|pl| pl.deflate(level)); } -} -fn infer_chunk_size(shape: &D, typesize: usize) -> Vec { - // This algorithm is borrowed from h5py, though the idea originally comes from PyTables. + pub fn shuffle(&mut self) { + self.with_dcpl(DatasetCreateBuilder::shuffle); + } - const CHUNK_BASE: f64 = (16 * 1024) as _; - const CHUNK_MIN: f64 = (8 * 1024) as _; - const CHUNK_MAX: f64 = (1024 * 1024) as _; + pub fn fletcher32(&mut self) { + self.with_dcpl(DatasetCreateBuilder::fletcher32); + } - if shape.ndim() == 0 { - return vec![]; - } else if shape.size() == 0 { - return vec![1]; + pub fn szip(&mut self, coding: SZip, px_per_block: u8) { + self.with_dcpl(|pl| pl.szip(coding, px_per_block)); } - let mut chunks = shape.dims(); - let total = (typesize * shape.size()) as f64; - let mut target: f64 = CHUNK_BASE * (total / (1024.0 * 1024.0)).log10().exp2(); + pub fn nbit(&mut self) { + self.with_dcpl(DatasetCreateBuilder::nbit); + } - if target > CHUNK_MAX { - target = CHUNK_MAX; - } else if target < CHUNK_MIN { - target = CHUNK_MIN; + pub fn scale_offset(&mut self, mode: ScaleOffset) { + self.with_dcpl(|pl| pl.scale_offset(mode)); } - // Loop over axes, dividing them by 2, stop when all of the following is true: - // - chunk size is smaller than the target chunk size or is within 50% of target chunk size - // - chunk size is smaller than the maximum chunk size - for i in 0.. 
{ - let size: usize = chunks.iter().product(); - let bytes = (size * typesize) as f64; - if (bytes < target * 1.5 && bytes < CHUNK_MAX) || size == 1 { - break; - } - let axis = i % shape.ndim(); - chunks[axis] = div_floor(chunks[axis] + 1, 2); + #[cfg(feature = "lzf")] + /// Apply a `lzf` filter + /// + /// This requires the `lzf` crate feature + pub fn lzf(&mut self) { + self.with_dcpl(|pl| pl.lzf()); } - chunks -} + #[cfg(feature = "blosc")] + /// Apply a `blosc` filter + /// + /// This requires the `blosc` crate feature + pub fn blosc(&mut self, complib: Blosc, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc(complib, clevel, shuffle)); + } -#[cfg(test)] -pub mod tests { - use std::fs; - use std::io::Read; + #[cfg(feature = "blosc")] + pub fn blosc_blosclz(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_blosclz(clevel, shuffle)); + } - use hdf5_sys::{h5d::H5Dwrite, h5s::H5S_ALL}; + #[cfg(feature = "blosc")] + pub fn blosc_lz4(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_lz4(clevel, shuffle)); + } - use crate::filters::{gzip_available, szip_available}; - use crate::internal_prelude::*; + #[cfg(feature = "blosc")] + pub fn blosc_lz4hc(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_lz4hc(clevel, shuffle)); + } - use super::infer_chunk_size; + #[cfg(feature = "blosc")] + pub fn blosc_snappy(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_snappy(clevel, shuffle)); + } - #[test] - pub fn test_infer_chunk_size() { - assert_eq!(infer_chunk_size(&(), 1), vec![]); - assert_eq!(infer_chunk_size(&0, 1), vec![1]); - assert_eq!(infer_chunk_size(&(1,), 1), vec![1]); - - // generated regression tests vs h5py implementation - assert_eq!(infer_chunk_size(&(65682868,), 1), vec![64144]); - 
assert_eq!(infer_chunk_size(&(56755037,), 2), vec![27713]); - assert_eq!(infer_chunk_size(&(56882283,), 4), vec![27775]); - assert_eq!(infer_chunk_size(&(21081789,), 8), vec![10294]); - assert_eq!(infer_chunk_size(&(5735, 6266), 1), vec![180, 392]); - assert_eq!(infer_chunk_size(&(467, 4427), 2), vec![30, 554]); - assert_eq!(infer_chunk_size(&(5579, 8323), 4), vec![88, 261]); - assert_eq!(infer_chunk_size(&(1686, 770), 8), vec![106, 49]); - assert_eq!(infer_chunk_size(&(344, 414, 294), 1), vec![22, 52, 37]); - assert_eq!(infer_chunk_size(&(386, 192, 444), 2), vec![25, 24, 56]); - assert_eq!(infer_chunk_size(&(277, 161, 460), 4), vec![18, 21, 58]); - assert_eq!(infer_chunk_size(&(314, 22, 253), 8), vec![40, 3, 32]); - assert_eq!(infer_chunk_size(&(89, 49, 91, 59), 1), vec![12, 13, 23, 15]); - assert_eq!(infer_chunk_size(&(42, 92, 60, 80), 2), vec![6, 12, 15, 20]); - assert_eq!(infer_chunk_size(&(15, 62, 62, 47), 4), vec![4, 16, 16, 12]); - assert_eq!(infer_chunk_size(&(62, 51, 55, 64), 8), vec![8, 7, 7, 16]); + #[cfg(feature = "blosc")] + pub fn blosc_zlib(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_zlib(clevel, shuffle)); } - #[test] - pub fn test_is_chunked() { - with_tmp_file(|file| { - assert_eq!(file.new_dataset::().create_anon(1).unwrap().is_chunked(), false); - assert_eq!( - file.new_dataset::().shuffle(true).create_anon(1).unwrap().is_chunked(), - true - ); - }) + #[cfg(feature = "blosc")] + pub fn blosc_zstd(&mut self, clevel: u8, shuffle: impl Into) { + let shuffle = shuffle.into(); + self.with_dcpl(|pl| pl.blosc_zstd(clevel, shuffle)); } - #[test] - pub fn test_chunks() { - with_tmp_file(|file| { - assert_eq!(file.new_dataset::().create_anon(1).unwrap().chunks(), None); - assert_eq!(file.new_dataset::().no_chunk().create_anon(1).unwrap().chunks(), None); - assert_eq!( - file.new_dataset::().chunk((1, 2)).create_anon((10, 20)).unwrap().chunks(), - Some(vec![1, 2]) - ); - assert_eq!( - 
file.new_dataset::().chunk_infer().create_anon((5579, 8323)).unwrap().chunks(), - Some(vec![88, 261]) - ); - assert_eq!( - file.new_dataset::().chunk_auto().create_anon((5579, 8323)).unwrap().chunks(), - None - ); - assert_eq!( - file.new_dataset::() - .chunk_auto() - .shuffle(true) - .create_anon((5579, 8323)) - .unwrap() - .chunks(), - Some(vec![88, 261]) - ); - }) + pub fn add_filter(&mut self, id: H5Z_filter_t, cdata: &[c_uint]) { + self.with_dcpl(|pl| pl.add_filter(id, cdata)); } - #[test] - pub fn test_chunks_resizable_zero_size() { - with_tmp_file(|file| { - let ds = file - .new_dataset::() - .chunk((128,)) - .resizable(true) - .create("chunked_empty", (0,)) - .unwrap(); - assert_eq!(ds.shape(), vec![0]); - - ds.resize((10,)).unwrap(); - assert_eq!(ds.shape(), vec![10]); - - ds.as_writer().write(&vec![3; 10]).unwrap(); - }) + pub fn clear_filters(&mut self) { + self.with_dcpl(DatasetCreateBuilder::clear_filters); } - #[test] - pub fn test_invalid_chunk() { - with_tmp_file(|file| { - let b = file.new_dataset::(); - assert_err!( - b.clone().shuffle(true).no_chunk().create_anon(1), - "Chunking must be enabled when filters are present" - ); - assert_err!( - b.clone().no_chunk().resizable(true).create_anon(1), - "Chunking must be enabled for resizable datasets" - ); - assert_err!( - b.clone().chunk_infer().create_anon(()), - "Chunking cannot be enabled for scalar datasets" - ); - assert_err!( - b.clone().chunk((1, 2)).create_anon(()), - "Chunking cannot be enabled for scalar datasets" - ); - assert_err!( - b.clone().chunk((1, 2)).create_anon(1), - "Invalid chunk ndim: expected 1, got 2" - ); - assert_err!( - b.clone().chunk((0, 2)).create_anon((1, 2)), - "Invalid chunk: [0, 2] (all dimensions must be positive)" - ); - assert_err!( - b.clone().chunk((1, 3)).create_anon((1, 2)), - "Invalid chunk: [1, 3] (must not exceed data shape in any dimension)" - ); - }) + pub fn alloc_time(&mut self, alloc_time: Option) { + self.with_dcpl(|pl| pl.alloc_time(alloc_time)); } - 
#[test] - pub fn test_shape_ndim_size() { - with_tmp_file(|file| { - let d = file.new_dataset::().create_anon((2, 3)).unwrap(); - assert_eq!(d.shape(), vec![2, 3]); - assert_eq!(d.size(), 6); - assert_eq!(d.ndim(), 2); - assert_eq!(d.is_scalar(), false); - - let d = file.new_dataset::().create_anon(()).unwrap(); - assert_eq!(d.shape(), vec![]); - assert_eq!(d.size(), 1); - assert_eq!(d.ndim(), 0); - assert_eq!(d.is_scalar(), true); - }) + pub fn fill_time(&mut self, fill_time: FillTime) { + self.with_dcpl(|pl| pl.fill_time(fill_time)); } - #[test] - pub fn test_filters() { - with_tmp_file(|file| { - assert_eq!( - file.new_dataset::().create_anon(100).unwrap().filters(), - Filters::default() - ); - assert_eq!( - file.new_dataset::() - .shuffle(true) - .create_anon(100) - .unwrap() - .filters() - .get_shuffle(), - true - ); - assert_eq!( - file.new_dataset::() - .fletcher32(true) - .create_anon(100) - .unwrap() - .filters() - .get_fletcher32(), - true - ); - assert_eq!( - file.new_dataset::() - .scale_offset(8) - .create_anon(100) - .unwrap() - .filters() - .get_scale_offset(), - Some(8) - ); - if gzip_available() { - assert_eq!( - file.new_dataset::() - .gzip(7) - .create_anon(100) - .unwrap() - .filters() - .get_gzip(), - Some(7) - ); - } - if szip_available() { - assert_eq!( - file.new_dataset::() - .szip(false, 4) - .create_anon(100) - .unwrap() - .filters() - .get_szip(), - Some((false, 4)) - ); - } - }); + pub fn fill_value>(&mut self, fill_value: T) { + self.dcpl_builder.fill_value(fill_value); + } - with_tmp_file(|file| { - let filters = Filters::new().fletcher32(true).shuffle(true).clone(); - assert_eq!( - file.new_dataset::().filters(&filters).create_anon(100).unwrap().filters(), - filters - ); - }) + pub fn no_fill_value(&mut self) { + self.with_dcpl(DatasetCreateBuilder::no_fill_value); } - #[test] - pub fn test_resizable() { - with_tmp_file(|file| { - assert_eq!(file.new_dataset::().create_anon(1).unwrap().is_resizable(), false); - assert_eq!( - 
file.new_dataset::().resizable(false).create_anon(1).unwrap().is_resizable(), - false - ); - assert_eq!( - file.new_dataset::().resizable(true).create_anon(1).unwrap().is_resizable(), - true - ); - }) + pub fn chunk(&mut self, chunk: D) { + self.chunk = Some(Chunk::Exact(chunk.dims())); } - #[test] - pub fn test_track_times() { - with_tmp_file(|file| { - assert_eq!(file.new_dataset::().create_anon(1).unwrap().tracks_times(), false); - assert_eq!( - file.new_dataset::().track_times(false).create_anon(1).unwrap().tracks_times(), - false - ); - assert_eq!( - file.new_dataset::().track_times(true).create_anon(1).unwrap().tracks_times(), - true - ); - }); + pub fn chunk_min_kb(&mut self, size: usize) { + self.chunk = Some(Chunk::MinKB(size)); + } - with_tmp_path(|path| { - let mut buf1: Vec = Vec::new(); - File::create(&path).unwrap().new_dataset::().create("foo", 1).unwrap(); - fs::File::open(&path).unwrap().read_to_end(&mut buf1).unwrap(); - - let mut buf2: Vec = Vec::new(); - File::create(&path) - .unwrap() - .new_dataset::() - .track_times(false) - .create("foo", 1) - .unwrap(); - fs::File::open(&path).unwrap().read_to_end(&mut buf2).unwrap(); - - assert_eq!(buf1, buf2); - - let mut buf2: Vec = Vec::new(); - File::create(&path) - .unwrap() - .new_dataset::() - .track_times(true) - .create("foo", 1) - .unwrap(); - fs::File::open(&path).unwrap().read_to_end(&mut buf2).unwrap(); - assert_ne!(buf1, buf2); - }); + pub fn no_chunk(&mut self) { + self.chunk = Some(Chunk::None); } - #[test] - pub fn test_storage_size_offset() { - with_tmp_file(|file| { - let ds = file.new_dataset::().create_anon(3).unwrap(); - assert_eq!(ds.storage_size(), 0); - assert!(ds.offset().is_none()); - - let buf: Vec = vec![1, 2, 3]; - h5call!(H5Dwrite( - ds.id(), - Datatype::from_type::().unwrap().id(), - H5S_ALL, - H5S_ALL, - H5P_DEFAULT, - buf.as_ptr() as *const _ - )) - .unwrap(); - assert_eq!(ds.storage_size(), 6); - assert!(ds.offset().is_some()); - }) + pub fn layout(&mut self, layout: 
Layout) { + self.with_dcpl(|pl| pl.layout(layout)); } - #[test] - pub fn test_datatype() { - with_tmp_file(|file| { - assert_eq!( - file.new_dataset::().create_anon(1).unwrap().dtype().unwrap(), - Datatype::from_type::().unwrap() - ); - }) + #[cfg(feature = "1.10.0")] + pub fn chunk_opts(&mut self, opts: ChunkOpts) { + self.with_dcpl(|pl| pl.chunk_opts(opts)); } - #[test] - pub fn test_create_anon() { - with_tmp_file(|file| { - let ds = file.new_dataset::().create("foo/bar", (1, 2)).unwrap(); - assert!(ds.is_valid()); - assert_eq!(ds.shape(), vec![1, 2]); - assert_eq!(ds.name(), "/foo/bar"); - assert_eq!(file.group("foo").unwrap().dataset("bar").unwrap().shape(), vec![1, 2]); - - let ds = file.new_dataset::().create_anon((2, 3)).unwrap(); - assert!(ds.is_valid()); - assert_eq!(ds.name(), ""); - assert_eq!(ds.shape(), vec![2, 3]); - }) + pub fn external(&mut self, name: &str, offset: usize, size: usize) { + self.with_dcpl(|pl| pl.external(name, offset, size)); } - #[test] - pub fn test_fill_value() { - with_tmp_file(|file| { - macro_rules! 
check_fill_value { - ($ds:expr, $tp:ty, $v:expr) => { - assert_eq!(($ds).fill_value::<$tp>().unwrap(), Some(($v) as $tp)); - }; + #[cfg(feature = "1.10.0")] + pub fn virtual_map( + &mut self, src_filename: F, src_dataset: D, src_extents: E1, src_selection: S1, + vds_extents: E2, vds_selection: S2, + ) where + F: AsRef, + D: AsRef, + E1: Into, + S1: Into, + E2: Into, + S2: Into, + { + self.dcpl_builder.virtual_map( + src_filename, + src_dataset, + src_extents, + src_selection, + vds_extents, + vds_selection, + ); + } + + pub fn obj_track_times(&mut self, track_times: bool) { + self.with_dcpl(|pl| pl.obj_track_times(track_times)); + } + + pub fn attr_phase_change(&mut self, max_compact: u32, min_dense: u32) { + self.with_dcpl(|pl| pl.attr_phase_change(max_compact, min_dense)); + } + + pub fn attr_creation_order(&mut self, attr_creation_order: AttrCreationOrder) { + self.with_dcpl(|pl| pl.attr_creation_order(attr_creation_order)); + } + + //////////////////// + // LinkCreate // + //////////////////// + + pub fn set_link_create_plist(&mut self, lcpl: &LinkCreate) { + self.lcpl_base = Some(lcpl.clone()); + } + + pub fn set_lcpl(&mut self, lcpl: &LinkCreate) { + self.set_link_create_plist(lcpl); + } + + pub fn link_create_plist(&mut self) -> &mut LinkCreateBuilder { + &mut self.lcpl_builder + } + + pub fn lcpl(&mut self) -> &mut LinkCreateBuilder { + self.link_create_plist() + } + + pub fn with_link_create_plist(&mut self, func: F) + where + F: Fn(&mut LinkCreateBuilder) -> &mut LinkCreateBuilder, + { + func(&mut self.lcpl_builder); + } + + pub fn with_lcpl(&mut self, func: F) + where + F: Fn(&mut LinkCreateBuilder) -> &mut LinkCreateBuilder, + { + self.with_link_create_plist(func); + } + + // LCPL properties + + pub fn create_intermediate_group(&mut self, create: bool) { + self.with_lcpl(|pl| pl.create_intermediate_group(create)); + } + + pub fn char_encoding(&mut self, encoding: CharEncoding) { + self.with_lcpl(|pl| pl.char_encoding(encoding)); + } +} + +macro_rules! 
impl_builder { + ($plist:ident: $name:ident/$short:ident) => { + paste::paste! { + #[inline] #[must_use] + pub fn [](mut self, $short: &$plist) -> Self { + self.builder.[]($short); self + } + + #[inline] #[must_use] + pub fn [](mut self, $short: &$plist) -> Self { + self.builder.[]($short); self + } + + #[inline] + pub fn [<$name _plist>](&mut self) -> &mut [<$plist Builder>] { + self.builder.[<$name _plist>]() + } + + #[inline] + pub fn $short(&mut self) -> &mut [<$plist Builder>] { + self.builder.$short() } - macro_rules! check_fill_value_approx { - ($ds:expr, $tp:ty, $v:expr) => {{ - let fill_value = ($ds).fill_value::<$tp>().unwrap().unwrap(); - // FIXME: should inexact float->float casts be prohibited? - assert!((fill_value - (($v) as $tp)).abs() < (1.0e-6 as $tp)); - }}; + #[inline] #[must_use] + pub fn [](mut self, func: F) -> Self + where + F: Fn(&mut [<$plist Builder>]) -> &mut [<$plist Builder>], + { + self.builder.[](func); self } - macro_rules! check_all_fill_values { - ($ds:expr, $v:expr) => { - check_fill_value!($ds, u8, $v); - check_fill_value!($ds, u16, $v); - check_fill_value!($ds, u32, $v); - check_fill_value!($ds, u64, $v); - check_fill_value!($ds, i8, $v); - check_fill_value!($ds, i16, $v); - check_fill_value!($ds, i32, $v); - check_fill_value!($ds, i64, $v); - check_fill_value!($ds, usize, $v); - check_fill_value!($ds, isize, $v); - check_fill_value_approx!($ds, f32, $v); - check_fill_value_approx!($ds, f64, $v); - }; + #[inline] #[must_use] + pub fn [](mut self, func: F) -> Self + where + F: Fn(&mut [<$plist Builder>]) -> &mut [<$plist Builder>], + { + self.builder.[](func); self + } + } + }; + (*: $name:ident($($var:ident: $ty:ty),*)) => { + #[inline] #[must_use] + pub fn $name(mut self $(, $var: $ty)*) -> Self { + self.builder.$name($($var),*); self + } + }; + ( + $(#[$meta:meta])* + $plist:ident: $name:ident($($var:ident: $ty:ty),*) + ) => { + paste::paste! 
{ + $(#[$meta])* + #[inline] #[must_use] #[doc = + "\u{21b3} [`" $plist "Builder::" $name "`]" + "(crate::plist::" $plist "Builder::" $name ")" + ] + pub fn $name(mut self $(, $var: $ty)*) -> Self { + self.builder.$name($($var),*); self + } + } + }; + ( + $(#[$meta:meta])* + $plist:ident: $name:ident<$($gid:ident: $gty:path),+>($($var:ident: $ty:ty),*) + ) => { + paste::paste! { + $(#[$meta])* + #[inline] #[must_use] #[doc = + "\u{21b3} [`" $plist "Builder::" $name "`]" + "(crate::plist::" $plist "Builder::" $name ")" + ] + pub fn $name<$($gid: $gty),+>(mut self $(, $var: $ty)*) -> Self { + self.builder.$name($($var),*); self } + } + }; +} + +macro_rules! impl_builder_methods { + () => { + impl_builder!(*: packed(packed: bool)); + + impl_builder!(DatasetAccess: access/dapl); + + impl_builder!(DatasetAccess: chunk_cache(nslots: usize, nbytes: usize, w0: f64)); + impl_builder!(#[cfg(feature = "1.8.17")] DatasetAccess: efile_prefix(prefix: &str)); + impl_builder!(#[cfg(feature = "1.10.0")] DatasetAccess: virtual_view(view: VirtualView)); + impl_builder!(#[cfg(feature = "1.10.0")] DatasetAccess: virtual_printf_gap(gap_size: usize)); + impl_builder!( + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] + DatasetAccess: all_coll_metadata_ops(is_collective: bool) + ); + + impl_builder!(DatasetCreate: create/dcpl); + + impl_builder!(DatasetCreate: set_filters(filters: &[Filter])); + impl_builder!(DatasetCreate: deflate(level: u8)); + impl_builder!(DatasetCreate: shuffle()); + impl_builder!(DatasetCreate: fletcher32()); + impl_builder!(DatasetCreate: szip(coding: SZip, px_per_block: u8)); + impl_builder!(DatasetCreate: nbit()); + impl_builder!(DatasetCreate: scale_offset(mode: ScaleOffset)); + impl_builder!(#[cfg(feature = "lzf")] DatasetCreate: lzf()); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc(complib: Blosc, clevel: u8, shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_blosclz(clevel: u8, 
shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_lz4(clevel: u8, shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_lz4hc(clevel: u8, shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_snappy(clevel: u8, shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_zlib(clevel: u8, shuffle: impl Into) + ); + impl_builder!( + #[cfg(feature = "blosc")] + DatasetCreate: blosc_zstd(clevel: u8, shuffle: impl Into) + ); + impl_builder!(DatasetCreate: add_filter(id: H5Z_filter_t, cdata: &[c_uint])); + impl_builder!(DatasetCreate: clear_filters()); + impl_builder!(DatasetCreate: alloc_time(alloc_time: Option)); + impl_builder!(DatasetCreate: fill_time(fill_time: FillTime)); + impl_builder!(DatasetCreate: fill_value>(fill_value: T)); + impl_builder!(DatasetCreate: no_fill_value()); + impl_builder!(DatasetCreate: chunk(chunk: D)); + impl_builder!(*: chunk_min_kb(size: usize)); + impl_builder!(DatasetCreate: no_chunk()); + impl_builder!(DatasetCreate: layout(layout: Layout)); + impl_builder!(#[cfg(feature = "1.10.0")] DatasetCreate: chunk_opts(opts: ChunkOpts)); + impl_builder!(DatasetCreate: external(name: &str, offset: usize, size: usize)); + impl_builder!( + #[cfg(feature = "1.10.0")] + DatasetCreate: virtual_map< + F: AsRef, D: AsRef, + E1: Into, S1: Into, E2: Into, S2: Into + >( + src_filename: F, src_dataset: D, + src_extents: E1, src_selection: S1, vds_extents: E2, vds_selection: S2 + ) + ); + impl_builder!(DatasetCreate: obj_track_times(track_times: bool)); + impl_builder!(DatasetCreate: attr_phase_change(max_compact: u32, min_dense: u32)); + impl_builder!(DatasetCreate: attr_creation_order(attr_creation_order: AttrCreationOrder)); + + impl_builder!(LinkCreate: link_create/lcpl); + + impl_builder!(LinkCreate: create_intermediate_group(create: bool)); + impl_builder!(LinkCreate: char_encoding(encoding: 
CharEncoding)); + }; +} + +/// These methods are common to all dataset builders +impl DatasetBuilder { + impl_builder_methods!(); +} + +/// The following methods are common to all dataset builders. +impl DatasetBuilderEmpty { + impl_builder_methods!(); +} - let ds = file.new_dataset::().create_anon(100).unwrap(); - check_all_fill_values!(ds, 0); +/// The following methods are common to all dataset builders. +impl DatasetBuilderEmptyShape { + impl_builder_methods!(); +} - let ds = file.new_dataset::().fill_value(42).create_anon(100).unwrap(); - check_all_fill_values!(ds, 42); +/// The following methods are common to all dataset builders. +impl<'d, T2: H5Type, D2: ndarray::Dimension> DatasetBuilderData<'d, T2, D2> { + impl_builder_methods!(); +} - let ds = file.new_dataset::().fill_value(1.234).create_anon(100).unwrap(); - check_all_fill_values!(ds, 1.234); +#[cfg(test)] +mod tests { + use super::{compute_chunk_shape, DatasetBuilder}; + use crate::filters::Filter; + use crate::test::with_tmp_file; + use crate::{Extent, Result, SimpleExtents}; + + #[cfg(feature = "blosc")] + use crate::filters::{Blosc, BloscShuffle}; + + use ndarray::Array2; + + #[allow(dead_code)] + fn check_filter(func: impl Fn(DatasetBuilder) -> DatasetBuilder, flt: Filter) { + let filters = vec![flt]; + with_tmp_file::, _>(|file| { + let arr = Array2::::ones((1000, 20)); + func(file.new_dataset_builder()).with_data(&arr).create("foo")?; + let ds = file.dataset("foo")?; + assert_eq!(ds.filters(), filters); + assert_eq!(ds.read_2d::()?, &arr); + Ok(()) }) + .unwrap() + } + + #[test] + #[cfg(feature = "blosc")] + fn test_blosc() { + check_filter(|d| d.blosc_zstd(9, true), Filter::Blosc(Blosc::ZStd, 9, BloscShuffle::Byte)); + } + + #[test] + #[cfg(feature = "lzf")] + fn test_lzf() { + check_filter(|d| d.lzf(), Filter::LZF); + } + + #[test] + fn test_compute_chunk_shape() { + let e = SimpleExtents::new(&[1, 1]); + assert_eq!(compute_chunk_shape(&e, 1), vec![1, 1]); + let e = SimpleExtents::new(&[1, 
10]); + assert_eq!(compute_chunk_shape(&e, 3), vec![1, 3]); + let e = SimpleExtents::new(&[1, 10]); + assert_eq!(compute_chunk_shape(&e, 11), vec![1, 10]); + + let e = SimpleExtents::new(&[Extent::from(1), Extent::from(10..)]); + assert_eq!(compute_chunk_shape(&e, 11), vec![1, 11]); + + let e = SimpleExtents::new(&[Extent::from(1), Extent::from(10..)]); + assert_eq!(compute_chunk_shape(&e, 9), vec![1, 9]); + + let e = SimpleExtents::new(&[4, 4, 4]); + // chunk shape should be greedy here, a minimal + // chunk shape would be (1, 3, 4) + (1, 1, 4) + assert_eq!(compute_chunk_shape(&e, 12), vec![1, 4, 4]); + + let e = SimpleExtents::new(&[4, 4, 4]); + assert_eq!(compute_chunk_shape(&e, 100), vec![4, 4, 4]); + + let e = SimpleExtents::new(&[4, 4, 4]); + assert_eq!(compute_chunk_shape(&e, 9), vec![1, 2, 4]); + + let e = SimpleExtents::new(&[1, 1, 100]); + assert_eq!(compute_chunk_shape(&e, 51), vec![1, 1, 100]); } } diff --git a/src/hl/dataspace.rs b/src/hl/dataspace.rs new file mode 100644 index 000000000..4350e8ef3 --- /dev/null +++ b/src/hl/dataspace.rs @@ -0,0 +1,308 @@ +use std::fmt::{self, Debug}; +use std::ops::Deref; +use std::ptr; + +#[cfg(not(feature = "1.12.0"))] +use hdf5_sys::h5s::H5Sencode1; +#[cfg(feature = "1.12.0")] +use hdf5_sys::h5s::H5Sencode2; + +use hdf5_sys::h5s::{ + H5S_class_t, H5Scopy, H5Screate, H5Screate_simple, H5Sdecode, H5Sget_select_npoints, + H5Sget_simple_extent_dims, H5Sget_simple_extent_ndims, H5Sget_simple_extent_npoints, + H5Sget_simple_extent_type, H5Sselect_valid, H5S_UNLIMITED, +}; + +use crate::hl::extents::{Extent, Extents, Ix}; +use crate::hl::selection::RawSelection; +use crate::internal_prelude::*; + +/// Represents the HDF5 dataspace object. 
+#[repr(transparent)] +#[derive(Clone)] +pub struct Dataspace(Handle); + +impl ObjectClass for Dataspace { + const NAME: &'static str = "dataspace"; + const VALID_TYPES: &'static [H5I_type_t] = &[H5I_DATASPACE]; + + fn from_handle(handle: Handle) -> Self { + Self(handle) + } + + fn handle(&self) -> &Handle { + &self.0 + } + + fn short_repr(&self) -> Option { + if let Ok(e) = self.extents() { + Some(format!("{}", e)) + } else { + Some("(invalid)".into()) + } + } +} + +impl Debug for Dataspace { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.debug_fmt(f) + } +} + +impl Deref for Dataspace { + type Target = Object; + + fn deref(&self) -> &Object { + unsafe { self.transmute() } + } +} + +unsafe fn get_shape(space_id: hid_t) -> Result> { + let ndim = h5check(H5Sget_simple_extent_ndims(space_id))? as usize; + let mut dims = vec![0; ndim]; + h5check(H5Sget_simple_extent_dims(space_id, dims.as_mut_ptr(), ptr::null_mut()))?; + Ok(dims.into_iter().map(|x| x as _).collect()) +} + +unsafe fn get_simple_extents(space_id: hid_t) -> Result { + let ndim = h5check(H5Sget_simple_extent_ndims(space_id))? 
as usize; + let (mut dims, mut maxdims) = (vec![0; ndim], vec![0; ndim]); + h5check(H5Sget_simple_extent_dims(space_id, dims.as_mut_ptr(), maxdims.as_mut_ptr()))?; + let mut extents = Vec::with_capacity(ndim); + for i in 0..ndim { + let (dim, max) = (dims[i] as _, maxdims[i]); + let max = if max == H5S_UNLIMITED { None } else { Some(max as _) }; + extents.push(Extent::new(dim, max)); + } + Ok(SimpleExtents::from_vec(extents)) +} + +impl Dataspace { + pub fn try_new>(extents: T) -> Result { + Self::from_extents(&extents.into()) + } + + pub fn copy(&self) -> Self { + Self::from_id(h5lock!(H5Scopy(self.id()))).unwrap_or_else(|_| Self::invalid()) + } + + pub fn ndim(&self) -> usize { + h5call!(H5Sget_simple_extent_ndims(self.id())).unwrap_or(0) as _ + } + + pub fn shape(&self) -> Vec { + h5lock!(get_shape(self.id())).unwrap_or_default() + } + + pub fn maxdims(&self) -> Vec> { + self.extents().unwrap_or(Extents::Null).maxdims() + } + + pub fn is_resizable(&self) -> bool { + self.maxdims().iter().any(Option::is_none) + } + + pub fn is_null(&self) -> bool { + h5lock!(H5Sget_simple_extent_type(self.id())) == H5S_class_t::H5S_NULL + } + + pub fn is_scalar(&self) -> bool { + h5lock!(H5Sget_simple_extent_type(self.id())) == H5S_class_t::H5S_SCALAR + } + + pub fn is_simple(&self) -> bool { + h5lock!(H5Sget_simple_extent_type(self.id())) == H5S_class_t::H5S_SIMPLE + } + + pub fn is_valid(&self) -> bool { + h5lock!(H5Sselect_valid(self.id())) > 0 + } + + pub fn size(&self) -> usize { + match h5lock!(H5Sget_simple_extent_type(self.id())) { + H5S_class_t::H5S_SIMPLE => { + h5call!(H5Sget_simple_extent_npoints(self.id())).unwrap_or(0) as _ + } + H5S_class_t::H5S_SCALAR => 1, + _ => 0, + } + } + + #[allow(deprecated)] + pub fn encode(&self) -> Result> { + cfg_if::cfg_if! 
{ + if #[cfg(feature = "1.12.0")] { + h5lock!({ + let mut len: size_t = 0; + let fapl = crate::hl::plist::file_access::FileAccessBuilder::new().finish()?; + h5try!(H5Sencode2(self.id(), ptr::null_mut(), &mut len, fapl.id())); + let mut buf = vec![0_u8; len]; + h5try!(H5Sencode2(self.id(), buf.as_mut_ptr().cast(), &mut len, fapl.id())); + Ok(buf) + }) + } else { + h5lock!({ + let mut len: size_t = 0; + h5try!(H5Sencode1(self.id(), ptr::null_mut(), &mut len as *mut _)); + let mut buf = vec![0_u8; len]; + h5try!(H5Sencode1(self.id(), buf.as_mut_ptr().cast(), &mut len as *mut _)); + Ok(buf) + }) + } + } + } + + pub fn decode(buf: T) -> Result + where + T: AsRef<[u8]>, + { + h5lock!(Self::from_id(h5try!(H5Sdecode(buf.as_ref().as_ptr().cast())))) + } + + fn from_extents(extents: &Extents) -> Result { + h5lock!(Self::from_id(match extents { + Extents::Null => H5Screate(H5S_class_t::H5S_NULL), + Extents::Scalar => H5Screate(H5S_class_t::H5S_SCALAR), + Extents::Simple(ref e) => { + let (mut dims, mut maxdims) = (vec![], vec![]); + for extent in e.iter() { + dims.push(extent.dim as _); + maxdims.push(extent.max.map_or(H5S_UNLIMITED, |x| x as _)); + } + H5Screate_simple(e.ndim() as _, dims.as_ptr(), maxdims.as_ptr()) + } + })) + } + + #[allow(clippy::match_wildcard_for_single_variants)] + pub fn extents(&self) -> Result { + h5lock!(match H5Sget_simple_extent_type(self.id()) { + H5S_class_t::H5S_NULL => Ok(Extents::Null), + H5S_class_t::H5S_SCALAR => Ok(Extents::Scalar), + H5S_class_t::H5S_SIMPLE => get_simple_extents(self.id()).map(Extents::Simple), + extent_type => fail!("Invalid extents type: {}", extent_type as c_int), + }) + } + + pub fn selection_size(&self) -> usize { + h5call!(H5Sget_select_npoints(self.id())).ok().map_or(0, |x| x as _) + } + + #[doc(hidden)] + pub fn select_raw>(&self, raw_sel: S) -> Result { + let raw_sel = raw_sel.into(); + sync(|| unsafe { + let space = self.copy(); + raw_sel.apply_to_dataspace(space.id())?; + ensure!(space.is_valid(), "Invalid 
selection, out of extents"); + Ok(space) + }) + } + + pub fn select>(&self, selection: S) -> Result { + let raw_sel = selection.into().into_raw(&self.shape())?; + self.select_raw(raw_sel) + } + + #[doc(hidden)] + pub fn get_raw_selection(&self) -> Result { + sync(|| unsafe { RawSelection::extract_from_dataspace(self.id()) }) + } + + pub fn get_selection(&self) -> Result { + let raw_sel = self.get_raw_selection()?; + Selection::from_raw(raw_sel) + } +} + +#[cfg(test)] +mod tests { + use hdf5_sys::h5i::H5I_INVALID_HID; + + use super::Dataspace; + use crate::internal_prelude::*; + + #[test] + fn test_dataspace_err() { + assert_err!(Dataspace::from_id(H5I_INVALID_HID), "Invalid handle id"); + } + + #[test] + fn test_dataspace_null() -> Result<()> { + let space = Dataspace::try_new(Extents::Null)?; + assert_eq!(space.ndim(), 0); + assert_eq!(space.shape(), vec![]); + assert_eq!(space.maxdims(), vec![]); + assert_eq!(space.size(), 0); + assert!(space.is_null()); + assert_eq!(space.extents()?, Extents::Null); + Ok(()) + } + + #[test] + fn test_dataspace_scalar() -> Result<()> { + let space = Dataspace::try_new(())?; + assert_eq!(space.ndim(), 0); + assert_eq!(space.shape(), vec![]); + assert_eq!(space.maxdims(), vec![]); + assert_eq!(space.size(), 1); + assert!(space.is_scalar()); + assert_eq!(space.extents()?, Extents::Scalar); + Ok(()) + } + + #[test] + fn test_dataspace_simple() -> Result<()> { + let space = Dataspace::try_new(123)?; + assert_eq!(space.ndim(), 1); + assert_eq!(space.shape(), vec![123]); + assert_eq!(space.maxdims(), vec![Some(123)]); + assert_eq!(space.size(), 123); + assert!(space.is_simple()); + assert_eq!(space.extents()?, Extents::simple(123)); + assert!(!space.is_resizable()); + + let space = Dataspace::try_new((5, 6..=10, 7..))?; + assert_eq!(space.ndim(), 3); + assert_eq!(space.shape(), vec![5, 6, 7]); + assert_eq!(space.maxdims(), vec![Some(5), Some(10), None]); + assert_eq!(space.size(), 210); + assert!(space.is_simple()); + 
assert_eq!(space.extents()?, Extents::simple((5, 6..=10, 7..))); + assert!(space.is_resizable()); + + Ok(()) + } + + #[test] + fn test_dataspace_copy() -> Result<()> { + let space = Dataspace::try_new((5, 6..=10, 7..))?; + let space_copy = space.copy(); + assert!(space_copy.is_valid()); + assert_eq!(space_copy.ndim(), space.ndim()); + assert_eq!(space_copy.shape(), space.shape()); + assert_eq!(space_copy.maxdims(), space.maxdims()); + Ok(()) + } + + #[test] + fn test_dataspace_encode() -> Result<()> { + let space = Dataspace::try_new((5, 6..=10, 7..))?; + let encoded = space.encode()?; + let decoded = Dataspace::decode(&encoded)?; + assert_eq!(decoded.extents().unwrap(), space.extents().unwrap()); + Ok(()) + } + + #[test] + fn test_dataspace_repr() -> Result<()> { + assert_eq!(&format!("{:?}", Dataspace::try_new(Extents::Null)?), ""); + assert_eq!(&format!("{:?}", Dataspace::try_new(())?), ""); + assert_eq!(&format!("{:?}", Dataspace::try_new(123)?), ""); + assert_eq!( + &format!("{:?}", Dataspace::try_new((5, 6..=10, 7..))?), + "" + ); + Ok(()) + } +} diff --git a/src/hl/datatype.rs b/src/hl/datatype.rs index 1b18e2cba..0da96ba4c 100644 --- a/src/hl/datatype.rs +++ b/src/hl/datatype.rs @@ -128,7 +128,7 @@ pub enum ByteOrder { None, } -#[cfg(hdf5_1_8_6)] +#[cfg(feature = "1.8.6")] impl From for ByteOrder { fn from(order: H5T_order_t) -> Self { match order { @@ -141,7 +141,7 @@ impl From for ByteOrder { } } -#[cfg(not(hdf5_1_8_6))] +#[cfg(not(feature = "1.8.6"))] impl From for ByteOrder { fn from(order: H5T_order_t) -> Self { match order { @@ -156,7 +156,7 @@ impl From for ByteOrder { impl Datatype { /// Get the total size of the datatype in bytes. pub fn size(&self) -> usize { - h5call!(H5Tget_size(self.id())).unwrap_or(0) as usize + h5lock!(H5Tget_size(self.id())) as usize } /// Get the byte order of the datatype. 
@@ -171,7 +171,6 @@ impl Datatype { let dst = dst.borrow(); let mut cdata = H5T_cdata_t::default(); h5lock!({ - let _e = silence_errors(); let noop = H5Tfind(*H5T_NATIVE_INT, *H5T_NATIVE_INT, &mut (&mut cdata as *mut _)); if H5Tfind(self.id(), dst.id(), &mut (&mut cdata as *mut _)) == noop { Some(Conversion::NoOp) @@ -200,11 +199,13 @@ impl Datatype { pub(crate) fn ensure_convertible(&self, dst: &Self, required: Conversion) -> Result<()> { // TODO: more detailed error messages after Debug/Display are implemented for Datatype if let Some(conv) = self.conv_path(dst) { - if conv > required { - fail!("{} conversion path required; available: {} conversion", required, conv) - } else { - Ok(()) - } + ensure!( + conv <= required, + "{} conversion path required; available: {} conversion", + required, + conv + ); + Ok(()) } else { fail!("no conversion paths found") } @@ -215,7 +216,7 @@ impl Datatype { h5lock!({ let id = self.id(); - let size = h5try!(H5Tget_size(id)) as usize; + let size = H5Tget_size(id) as usize; match H5Tget_class(id) { H5T_class_t::H5T_INTEGER => { let signed = match H5Tget_sign(id) { @@ -234,10 +235,10 @@ impl Datatype { let mut members: Vec = Vec::new(); for idx in 0..h5try!(H5Tget_nmembers(id)) as _ { let mut value: u64 = 0; - h5try!(H5Tget_member_value(id, idx, &mut value as *mut _ as *mut _)); + h5try!(H5Tget_member_value(id, idx, (&mut value as *mut u64).cast())); let name = H5Tget_member_name(id, idx); members.push(EnumMember { name: string_from_cstr(name), value }); - h5_free_memory(name as *mut _); + h5_free_memory(name.cast()); } let base_dt = Self::from_id(H5Tget_super(id))?; let (size, signed) = match base_dt.to_descriptor()? 
{ @@ -259,7 +260,7 @@ impl Datatype { let mut fields: Vec = Vec::new(); for idx in 0..h5try!(H5Tget_nmembers(id)) as _ { let name = H5Tget_member_name(id, idx); - let offset = h5try!(H5Tget_member_offset(id, idx)); + let offset = H5Tget_member_offset(id, idx); let ty = Self::from_id(h5try!(H5Tget_member_type(id, idx)))?; fields.push(CompoundField { name: string_from_cstr(name), @@ -267,7 +268,7 @@ impl Datatype { offset: offset as _, index: idx as _, }); - h5_free_memory(name as *mut _); + h5_free_memory(name.cast()); } Ok(TD::Compound(CompoundType { fields, size })) } @@ -345,13 +346,13 @@ impl Datatype { let bool_id = h5try!(H5Tenum_create(*H5T_NATIVE_INT8)); h5try!(H5Tenum_insert( bool_id, - b"FALSE\0".as_ptr() as *const _, - &0_i8 as *const _ as *const _ + b"FALSE\0".as_ptr().cast(), + (&0_i8 as *const i8).cast() )); h5try!(H5Tenum_insert( bool_id, - b"TRUE\0".as_ptr() as *const _, - &1_i8 as *const _ as *const _ + b"TRUE\0".as_ptr().cast(), + (&1_i8 as *const i8).cast() )); Ok(bool_id) } @@ -363,7 +364,7 @@ impl Datatype { h5try!(H5Tenum_insert( enum_id, name.as_ptr(), - &member.value as *const _ as *const _ + (&member.value as *const u64).cast() )); } Ok(enum_id) diff --git a/src/hl/extents.rs b/src/hl/extents.rs new file mode 100644 index 000000000..b09faf499 --- /dev/null +++ b/src/hl/extents.rs @@ -0,0 +1,659 @@ +use std::borrow::Borrow; +use std::convert::identity; +use std::fmt::{self, Debug, Display}; +use std::ops::{Deref, RangeFrom, RangeInclusive}; + +use hdf5_sys::h5s::H5S_MAX_RANK; + +pub type Ix = usize; + +/// Current and maximum dimension size for a particular dimension. +#[derive(Clone, Copy, PartialEq, Eq, Hash)] +pub struct Extent { + /// Current dimension size. + pub dim: Ix, + /// Maximum dimension size (or `None` if unlimited). 
+ pub max: Option, +} + +impl Debug for Extent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "Extent({})", self) + } +} + +impl Display for Extent { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if let Some(max) = self.max { + if self.dim == max { + write!(f, "{}", self.dim) + } else { + write!(f, "{}..={}", self.dim, max) + } + } else { + write!(f, "{}..", self.dim) + } + } +} + +impl From for Extent { + fn from(dim: Ix) -> Self { + Self { dim, max: Some(dim) } + } +} + +impl From<(Ix, Option)> for Extent { + fn from((dim, max): (Ix, Option)) -> Self { + Self { dim, max } + } +} + +impl From> for Extent { + fn from(range: RangeFrom) -> Self { + Self { dim: range.start, max: None } + } +} + +impl From> for Extent { + fn from(range: RangeInclusive) -> Self { + Self { dim: *range.start(), max: Some(*range.end()) } + } +} + +impl + Clone> From<&T> for Extent { + fn from(extent: &T) -> Self { + extent.clone().into() + } +} + +impl Extent { + pub fn new(dim: Ix, max: Option) -> Self { + Self { dim, max } + } + + /// Creates a new extent with maximum size equal to the current size. + pub fn fixed(dim: Ix) -> Self { + Self { dim, max: Some(dim) } + } + + /// Creates a new extent with unlimited maximum size. + pub fn resizable(dim: Ix) -> Self { + Self { dim, max: None } + } + + pub fn is_fixed(&self) -> bool { + self.max.map_or(false, |max| self.dim >= max) + } + + pub fn is_resizable(&self) -> bool { + self.max.is_none() + } + + pub fn is_unlimited(&self) -> bool { + self.is_resizable() + } + + pub fn is_valid(&self) -> bool { + self.max.unwrap_or(self.dim) >= self.dim + } +} + +/// Extents for a simple dataspace, a multidimensional array of elements. +/// +/// The dimensionality of the dataspace (or the rank of the array) is fixed and is defined +/// at creation time. The size of each dimension can grow during the life time of the +/// dataspace from the current size up to the maximum size. 
Both the current size and the +/// maximum size are specified at creation time. The sizes of dimensions at any particular +/// time in the life of a dataspace are called the current dimensions, or the dataspace +/// extent. They can be queried along with the maximum sizes. +#[derive(Clone, PartialEq, Eq)] +pub struct SimpleExtents { + inner: Vec, +} + +impl SimpleExtents { + pub fn from_vec(extents: Vec) -> Self { + Self { inner: extents } + } + + pub fn new(extents: T) -> Self + where + T: IntoIterator, + T::Item: Into, + { + Self::from_vec(extents.into_iter().map(Into::into).collect()) + } + + pub fn fixed(extents: T) -> Self + where + T: IntoIterator, + T::Item: Borrow, + { + Self::from_vec(extents.into_iter().map(|x| Extent::fixed(*x.borrow())).collect()) + } + + /// Create extents resizable along all dimensions + pub fn resizable(extents: T) -> Self + where + T: IntoIterator, + T::Item: Borrow, + { + Self::from_vec(extents.into_iter().map(|x| Extent::resizable(*x.borrow())).collect()) + } + + pub fn ndim(&self) -> usize { + self.inner.len() + } + + pub fn dims(&self) -> Vec { + self.inner.iter().map(|e| e.dim).collect() + } + + pub fn maxdims(&self) -> Vec> { + self.inner.iter().map(|e| e.max).collect() + } + + pub fn size(&self) -> usize { + self.inner.iter().fold(1, |acc, x| acc * x.dim) + } + + pub fn is_fixed(&self) -> bool { + !self.inner.is_empty() && self.inner.iter().map(Extent::is_fixed).all(identity) + } + + pub fn is_resizable(&self) -> bool { + !self.inner.is_empty() && self.inner.iter().map(Extent::is_unlimited).any(identity) + } + + pub fn is_unlimited(&self) -> bool { + self.inner.iter().map(Extent::is_unlimited).any(identity) + } + + pub fn is_valid(&self) -> bool { + self.inner.iter().map(Extent::is_valid).all(identity) && self.ndim() <= H5S_MAX_RANK as _ + } + + pub fn iter( + &self, + ) -> impl ExactSizeIterator + DoubleEndedIterator { + self.inner.iter() + } +} + +impl Deref for SimpleExtents { + type Target = [Extent]; + + fn deref(&self) 
-> &Self::Target { + &self.inner + } +} + +impl Debug for SimpleExtents { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "SimpleExtents({})", self) + } +} + +impl Display for SimpleExtents { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + if self.ndim() == 0 { + write!(f, "()") + } else if self.ndim() == 1 { + write!(f, "({},)", self[0]) + } else { + let extents = self.iter().map(ToString::to_string).collect::>().join(", "); + write!(f, "({})", extents) + } + } +} + +macro_rules! impl_tuple { + () => (); + + ($head:ident, $($tail:ident,)*) => ( + #[allow(non_snake_case)] + impl<$head, $($tail,)*> From<($head, $($tail,)*)> for SimpleExtents + where $head: Into, $($tail: Into,)* + { + fn from(extents: ($head, $($tail,)*)) -> Self { + let ($head, $($tail,)*) = extents; + Self::from_vec(vec![($head).into(), $(($tail).into(),)*]) + } + } + + impl_tuple! { $($tail,)* } + ) +} + +impl_tuple! { T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, } + +macro_rules! impl_fixed { + ($tp:ty,) => (); + + ($tp:ty, $head:expr, $($tail:expr,)*) => ( + impl From<[$tp; $head]> for SimpleExtents { + fn from(extents: [$tp; $head]) -> Self { + Self::from_vec(extents.iter().map(Extent::from).collect()) + } + } + + impl From<&[$tp; $head]> for SimpleExtents { + fn from(extents: &[$tp; $head]) -> Self { + Self::from_vec(extents.iter().map(Extent::from).collect()) + } + } + + impl_fixed! { $tp, $($tail,)* } + ) +} + +macro_rules! 
impl_from { + ($tp:ty) => { + impl From<$tp> for SimpleExtents { + fn from(extent: $tp) -> Self { + (extent,).into() + } + } + + impl From<&$tp> for SimpleExtents { + fn from(extent: &$tp) -> Self { + (extent.clone(),).into() + } + } + + impl From> for SimpleExtents { + fn from(extents: Vec<$tp>) -> Self { + Self::from_vec(extents.iter().map(Extent::from).collect()) + } + } + + impl From<&Vec<$tp>> for SimpleExtents { + fn from(extents: &Vec<$tp>) -> Self { + Self::from_vec(extents.iter().map(Extent::from).collect()) + } + } + + impl From<&[$tp]> for SimpleExtents { + fn from(extents: &[$tp]) -> Self { + Self::from_vec(extents.iter().map(Extent::from).collect()) + } + } + + impl_fixed! { $tp, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, } + }; +} + +impl_from!(Ix); +impl_from!((Ix, Option)); +impl_from!(RangeFrom); +impl_from!(RangeInclusive); +impl_from!(Extent); + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Extents { + /// A null dataspace contains no data elements. + /// + /// Note that no selections can be applied to a null dataset as there is nothing to select. + Null, + + /// A scalar dataspace, representing just one element. + /// + /// The datatype of this one element may be very complex, e.g., a compound structure + /// with members being of any allowed HDF5 datatype, including multidimensional arrays, + /// strings, and nested compound structures. By convention, the rank of a scalar dataspace + /// is always 0 (zero); it may be thought of as a single, dimensionless point, though + /// that point may be complex. + Scalar, + + /// A simple dataspace, a multidimensional array of elements. + /// + /// The dimensionality of the dataspace (or the rank of the array) is fixed and is defined + /// at creation time. The size of each dimension can grow during the life time of the + /// dataspace from the current size up to the maximum size. Both the current size and the + /// maximum size are specified at creation time. 
The sizes of dimensions at any particular + /// time in the life of a dataspace are called the current dimensions, or the dataspace + /// extent. They can be queried along with the maximum sizes. + /// + /// A dimension can have an `UNLIMITED` maximum size. This results in a dataset which can + /// be resized after being created. + /// Do note that an unlimited dimension will force chunking of the + /// dataset which could result in excessive disk usage with the default chunk size. + /// It is recommended to apply some compression filter to such datasets. + Simple(SimpleExtents), +} + +impl Extents { + pub fn new>(extents: T) -> Self { + extents.into() + } + + /// Creates extents for a *null* dataspace. + pub fn null() -> Self { + Self::Null + } + + /// Creates extents for a *scalar* dataspace. + pub fn scalar() -> Self { + Self::Scalar + } + + /// Creates extents for a *simple* dataspace. + pub fn simple>(extents: T) -> Self { + Self::Simple(extents.into()) + } + + fn as_simple(&self) -> Option<&SimpleExtents> { + match self { + Self::Simple(ref e) => Some(e), + _ => None, + } + } + + /// Returns true if the extents type is *null*. + pub fn is_null(&self) -> bool { + self == &Self::Null + } + + /// Returns true if the extents type is *scalar*. + pub fn is_scalar(&self) -> bool { + self == &Self::Scalar + } + + /// Returns true if the extents type is *simple*. + pub fn is_simple(&self) -> bool { + self.as_simple().is_some() + } + + /// Returns the dataspace rank (or zero for null/scalar extents). + pub fn ndim(&self) -> usize { + self.as_simple().map_or(0, SimpleExtents::ndim) + } + + /// Returns the current extents (or empty list for null/scalar extents). + pub fn dims(&self) -> Vec { + self.as_simple().map_or_else(Vec::new, SimpleExtents::dims) + } + + /// Returns the maximum extents (or empty list for null/scalar extents). 
+ pub fn maxdims(&self) -> Vec> { + self.as_simple().map_or_else(Vec::new, SimpleExtents::maxdims) + } + + /// Returns the total number of elements. + pub fn size(&self) -> usize { + match self { + Self::Null => 0, + Self::Scalar => 1, + Self::Simple(extents) => extents.size(), + } + } + + pub fn is_valid(&self) -> bool { + self.as_simple().map_or(true, SimpleExtents::is_valid) + } + + pub fn is_unlimited(&self) -> bool { + self.as_simple().map_or(true, SimpleExtents::is_unlimited) + } + + pub fn is_resizable(&self) -> bool { + self.as_simple().map_or(true, SimpleExtents::is_resizable) + } + + pub fn resizable(self) -> Self { + match self { + Self::Simple(extents) => SimpleExtents::resizable(extents.dims()).into(), + _ => self.clone(), + } + } + + pub fn iter( + &self, + ) -> impl ExactSizeIterator + DoubleEndedIterator { + ExtentsIter { inner: self.as_simple().map(SimpleExtents::iter) } + } + + pub fn slice(&self) -> Option<&[Extent]> { + if let Self::Simple(x) = self { + Some(x) + } else { + None + } + } +} + +pub struct ExtentsIter { + inner: Option, +} + +impl<'a, A: DoubleEndedIterator + ExactSizeIterator> Iterator + for ExtentsIter +{ + type Item = &'a Extent; + + fn next(&mut self) -> Option { + match self.inner { + Some(ref mut iter) => iter.next(), + None => None, + } + } +} + +impl<'a, A: DoubleEndedIterator + ExactSizeIterator> + DoubleEndedIterator for ExtentsIter +{ + fn next_back(&mut self) -> Option { + match self.inner { + Some(ref mut iter) => iter.next_back(), + None => None, + } + } +} + +impl<'a, A: DoubleEndedIterator + ExactSizeIterator> + ExactSizeIterator for ExtentsIter +{ + fn len(&self) -> usize { + match self.inner { + Some(ref iter) => iter.len(), + None => 0, + } + } +} + +impl Display for Extents { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::Null => write!(f, "null"), + Self::Scalar => write!(f, "scalar"), + Self::Simple(ref e) => write!(f, "{}", e), + } + } +} + +impl> From for Extents { + fn 
from(extents: T) -> Self { + let extents = extents.into(); + if extents.is_empty() { + Self::Scalar + } else { + Self::Simple(extents) + } + } +} + +impl From<()> for Extents { + fn from(_: ()) -> Self { + Self::Scalar + } +} + +impl From<&Self> for Extents { + fn from(extents: &Self) -> Self { + extents.clone() + } +} + +#[cfg(test)] +pub mod tests { + use super::{Extent, Extents, SimpleExtents}; + + #[test] + pub fn test_extent() { + let e1 = Extent { dim: 1, max: None }; + let e2 = Extent { dim: 1, max: Some(2) }; + let e3 = Extent { dim: 2, max: Some(2) }; + + assert_eq!(Extent::new(1, Some(2)), e2); + + assert_eq!(Extent::from(2), e3); + assert_eq!(Extent::from((1, Some(2))), e2); + assert_eq!(Extent::from(1..), e1); + assert_eq!(Extent::from(1..=2), e2); + + assert_eq!(Extent::from(&2), e3); + assert_eq!(Extent::from(&(1, Some(2))), e2); + assert_eq!(Extent::from(&(1..)), e1); + assert_eq!(Extent::from(&(1..=2)), e2); + + assert_eq!(format!("{}", e1), "1.."); + assert_eq!(format!("{:?}", e1), "Extent(1..)"); + assert_eq!(format!("{}", e2), "1..=2"); + assert_eq!(format!("{:?}", e2), "Extent(1..=2)"); + assert_eq!(format!("{}", e3), "2"); + assert_eq!(format!("{:?}", e3), "Extent(2)"); + + assert_eq!(Extent::resizable(1), e1); + assert_eq!(Extent::new(1, Some(2)), e2); + assert_eq!(Extent::fixed(2), e3); + + assert!(!e1.is_fixed() && !e2.is_fixed() && e3.is_fixed()); + assert!(e1.is_resizable() && !e2.is_resizable() && !e3.is_resizable()); + assert!(e1.is_unlimited() && !e2.is_unlimited() && !e3.is_unlimited()); + + assert!(e1.is_valid() && e2.is_valid() && e3.is_valid()); + assert!(!Extent::new(3, Some(2)).is_valid()); + } + + #[test] + pub fn test_simple_extents() { + type SE = SimpleExtents; + + let e1 = Extent::from(1..); + let e2 = Extent::from(2..=3); + let e3 = Extent::from(4); + + let v = vec![e1, e2, e3]; + let se = SE::from_vec(v.clone()); + assert_eq!(se.to_vec(), v); + assert_eq!(se.len(), 3); + assert_eq!(se.ndim(), 3); + assert_eq!(se.dims(), 
vec![1, 2, 4]); + assert_eq!(se.maxdims(), vec![None, Some(3), Some(4)]); + + let se1 = SE::new(&[(1, None), (2, Some(3)), (4, Some(4))]); + let se2 = SE::fixed(&[1, 2]); + let se3 = SE::resizable(&[1, 2]); + + assert_eq!(se1, se); + assert_eq!(se2, SE::new(&[1..=1, 2..=2])); + assert_eq!(se3, SE::new(&[1.., 2..])); + + assert!(!se1.is_fixed() && se2.is_fixed() && !se3.is_fixed()); + assert!(se1.is_unlimited() && !se2.is_unlimited() && se3.is_unlimited()); + assert!(se1.is_resizable() && !se2.is_resizable() && se3.is_resizable()); + + assert!(se1.is_valid() && se2.is_valid() && se3.is_valid()); + assert!(!SE::new(&[1..=2, 4..=3]).is_valid()); + assert!(!SE::new(vec![1; 100]).is_valid()); + + assert_eq!(format!("{}", se1), "(1.., 2..=3, 4)"); + assert_eq!(format!("{:?}", se1), "SimpleExtents((1.., 2..=3, 4))"); + assert_eq!(format!("{}", se2), "(1, 2)"); + assert_eq!(format!("{:?}", se2), "SimpleExtents((1, 2))"); + assert_eq!(format!("{}", se3), "(1.., 2..)"); + assert_eq!(format!("{:?}", se3), "SimpleExtents((1.., 2..))"); + assert_eq!(format!("{}", SE::new(&[1..])), "(1..,)"); + assert_eq!(format!("{:?}", SE::new(&[1..])), "SimpleExtents((1..,))"); + + assert_eq!( + SE::from((1, 2.., 3..=4, (5, Some(6)), Extent::from(7..=8))), + SE::new(&[(1, Some(1)), (2, None), (3, Some(4)), (5, Some(6)), (7, Some(8))]) + ); + assert_eq!(SE::from(1), SE::new(&[1])); + assert_eq!(SE::from(&1), SE::new(&[1])); + assert_eq!(SE::from(1..), SE::new(&[1..])); + assert_eq!(SE::from(&(1..)), SE::new(&[1..])); + assert_eq!(SE::from(1..=2), SE::new(&[1..=2])); + assert_eq!(SE::from(&(1..=2)), SE::new(&[1..=2])); + assert_eq!(SE::from((1, Some(2))), SE::new(&[1..=2])); + assert_eq!(SE::from(&(1, Some(2))), SE::new(&[1..=2])); + assert_eq!(SE::from(Extent::from(1..=2)), SE::new(&[1..=2])); + assert_eq!(SE::from(&Extent::from(1..=2)), SE::new(&[1..=2])); + assert_eq!(SE::from(vec![1, 2]), SE::new(&[1, 2])); + assert_eq!(SE::from(vec![1, 2].as_slice()), SE::new(&[1, 2])); + 
assert_eq!(SE::from([1, 2]), SE::new(&[1, 2])); + assert_eq!(SE::from(&[1, 2]), SE::new(&[1, 2])); + assert_eq!(SE::from(&vec![1, 2]), SE::new(&[1, 2])); + } + + #[test] + pub fn test_extents() { + let e = Extents::new(&[3, 4]); + assert_eq!(e.ndim(), 2); + assert_eq!(e.dims(), vec![3, 4]); + assert_eq!(e.size(), 12); + assert!(!e.is_scalar()); + assert!(!e.is_null()); + assert!(e.is_simple()); + assert!(e.is_valid()); + assert!(!e.is_resizable()); + assert!(!e.is_unlimited()); + assert_eq!(e.maxdims(), vec![Some(3), Some(4)]); + assert_eq!(e.as_simple(), Some(&SimpleExtents::new(&[3, 4]))); + + let e = Extents::new([1, 2]).resizable(); + assert_eq!(e.dims(), vec![1, 2]); + assert_eq!(e.maxdims(), vec![None, None]); + + let e = Extents::new((3..=2, 4)); + assert!(!e.is_valid()); + + let e = Extents::new((3.., 4)); + assert_eq!(e.ndim(), 2); + assert_eq!(e.dims(), vec![3, 4]); + assert_eq!(e.size(), 12); + assert!(e.is_resizable()); + assert!(e.is_unlimited()); + assert_eq!(e.maxdims(), vec![None, Some(4)]); + + let e = Extents::new((3.., 4..)); + assert!(e.is_resizable()); + assert!(e.is_unlimited()); + assert_eq!(e.maxdims(), vec![None, None]); + + let e = Extents::new(()); + assert!(e.is_scalar()); + + let e = Extents::new([0usize; 0]); + assert!(e.is_scalar()); + + let e = Extents::null(); + assert_eq!(e.ndim(), 0); + assert_eq!(e.dims(), vec![]); + assert_eq!(e.size(), 0); + assert!(!e.is_scalar()); + assert!(e.is_null()); + assert!(!e.is_simple()); + assert!(e.is_valid()); + + let e = Extents::scalar(); + assert_eq!(e.ndim(), 0); + assert_eq!(e.dims(), vec![]); + assert_eq!(e.size(), 1); + assert!(e.is_scalar()); + assert!(!e.is_null()); + assert!(!e.is_simple()); + assert!(e.is_valid()); + } +} diff --git a/src/hl/file.rs b/src/hl/file.rs index 687a8387f..6e7d1d9b7 100644 --- a/src/hl/file.rs +++ b/src/hl/file.rs @@ -1,12 +1,12 @@ use std::fmt::{self, Debug}; +use std::mem; use std::ops::Deref; use std::path::Path; use hdf5_sys::h5f::{ H5Fclose, H5Fcreate, 
H5Fflush, H5Fget_access_plist, H5Fget_create_plist, H5Fget_filesize, H5Fget_freespace, H5Fget_intent, H5Fget_obj_count, H5Fget_obj_ids, H5Fopen, H5F_ACC_DEFAULT, - H5F_ACC_EXCL, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_TRUNC, H5F_OBJ_ALL, H5F_OBJ_FILE, - H5F_SCOPE_LOCAL, + H5F_ACC_EXCL, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_TRUNC, H5F_SCOPE_LOCAL, }; use crate::hl::plist::{ @@ -133,15 +133,16 @@ impl File { } /// Returns objects IDs of the contained objects. NOTE: these are borrowed references. + #[allow(unused)] fn get_obj_ids(&self, types: c_uint) -> Vec { h5lock!({ let count = h5call!(H5Fget_obj_count(self.id(), types)).unwrap_or(0) as size_t; if count > 0 { let mut ids: Vec = Vec::with_capacity(count as _); - unsafe { - ids.set_len(count as _); - } if h5call!(H5Fget_obj_ids(self.id(), types, count, ids.as_mut_ptr())).is_ok() { + unsafe { + ids.set_len(count as _); + } ids.retain(|id| *id != self.id()); return ids; } @@ -151,26 +152,11 @@ impl File { } /// Closes the file and invalidates all open handles for contained objects. - pub fn close(self) { - h5lock!({ - let file_ids = self.get_obj_ids(H5F_OBJ_FILE); - let object_ids = self.get_obj_ids(H5F_OBJ_ALL & !H5F_OBJ_FILE); - for file_id in &file_ids { - if let Ok(handle) = Handle::try_new(*file_id) { - handle.decref_full(); - } - } - for object_id in &object_ids { - if let Ok(handle) = Handle::try_new(*object_id) { - handle.decref_full(); - } - } - H5Fclose(self.id()); - while self.is_valid() { - self.0.decref(); - } - self.0.decref(); - }) + pub fn close(self) -> Result<()> { + let id = self.id(); + // Ensure we only decref once + mem::forget(self.0); + h5call!(H5Fclose(id)).map(|_| ()) } /// Returns a copy of the file access property list. @@ -235,7 +221,7 @@ impl FileBuilder { /// Opens a file in a given mode. 
pub fn open_as>(&self, filename: P, mode: OpenMode) -> Result { let filename = filename.as_ref(); - if let OpenMode::Append = mode { + if mode == OpenMode::Append { if let Ok(file) = self.open_as(filename, OpenMode::ReadWrite) { return Ok(file); } @@ -376,16 +362,16 @@ pub mod tests { #[test] pub fn test_unable_to_open() { with_tmp_dir(|dir| { - assert_err!(File::open(&dir), "unable to open file"); - assert_err!(File::open_rw(&dir), "unable to open file"); - assert_err!(File::create_excl(&dir), "unable to create file"); - assert_err!(File::create(&dir), "unable to create file"); - assert_err!(File::append(&dir), "unable to create file"); + assert_err_re!(File::open(&dir), "unable to (?:synchronously )?open file"); + assert_err_re!(File::open_rw(&dir), "unable to (?:synchronously )?open file"); + assert_err_re!(File::create_excl(&dir), "unable to (?:synchronously )?create file"); + assert_err_re!(File::create(&dir), "unable to (?:synchronously )?create file"); + assert_err_re!(File::append(&dir), "unable to (?:synchronously )?create file"); }); with_tmp_path(|path| { fs::File::create(&path).unwrap().write_all(b"foo").unwrap(); assert!(fs::metadata(&path).is_ok()); - assert_err!(File::open(&path), "unable to open file"); + assert_err_re!(File::open(&path), "unable to (?:synchronously )?open file"); }) } @@ -393,7 +379,10 @@ pub mod tests { pub fn test_file_create() { with_tmp_path(|path| { File::create(&path).unwrap().create_group("foo").unwrap(); - assert_err!(File::create(&path).unwrap().group("foo"), "unable to open group"); + assert_err_re!( + File::create(&path).unwrap().group("foo"), + "unable to (?:synchronously )?open group" + ); }); } @@ -401,7 +390,7 @@ pub mod tests { pub fn test_file_create_excl() { with_tmp_path(|path| { File::create_excl(&path).unwrap(); - assert_err!(File::create_excl(&path), "unable to create file"); + assert_err_re!(File::create_excl(&path), "unable to (?:synchronously )?create file"); }); } @@ -419,9 +408,9 @@ pub mod tests { 
File::create(&path).unwrap().create_group("foo").unwrap(); let file = File::open(&path).unwrap(); file.group("foo").unwrap(); - assert_err!( + assert_err_re!( file.create_group("bar"), - "unable to create group: no write intent on file" + "unable to (?:synchronously )?create group: no write intent on file" ); assert_err!(File::open("/foo/bar/baz"), "unable to open file"); }); @@ -444,9 +433,9 @@ pub mod tests { assert!(file.size() > 0); let orig_size = fs::metadata(file.filename()).unwrap().len(); assert!(file.size() > orig_size); - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] assert_ne!(orig_size, 0); - #[cfg(not(hdf5_1_10_0))] + #[cfg(not(feature = "1.10.0"))] assert_eq!(orig_size, 0); assert!(file.flush().is_ok()); assert!(file.size() > 0); @@ -499,30 +488,92 @@ pub mod tests { }) } + fn rc(id: hid_t) -> Result { + h5call!(hdf5_sys::h5i::H5Iget_ref(id)).map(|x| x as _) + } + #[test] - pub fn test_close_automatic() { - // File going out of scope should just close its own handle + fn test_strong_close() { + use crate::hl::plist::file_access::FileCloseDegree; with_tmp_path(|path| { - let file = File::create(&path).unwrap(); + let file = File::with_options() + .with_fapl(|fapl| fapl.fclose_degree(FileCloseDegree::Strong)) + .create(&path) + .unwrap(); + assert_eq!(file.refcount(), 1); + let fileid = file.id(); + let group = file.create_group("foo").unwrap(); + assert_eq!(file.refcount(), 1); + assert_eq!(group.refcount(), 1); + let file_copy = group.file().unwrap(); + assert_eq!(group.refcount(), 1); + assert_eq!(file.refcount(), 2); + assert_eq!(file_copy.refcount(), 2); + drop(file); - assert!(group.is_valid()); - assert!(file_copy.is_valid()); + assert_eq!(rc(fileid).unwrap(), 1); + assert_eq!(group.refcount(), 1); + assert_eq!(file_copy.refcount(), 1); + + h5lock!({ + // Lock to ensure fileid does not get overwritten + let groupid = group.id(); + drop(file_copy); + assert!(rc(fileid).is_err()); + assert!(rc(groupid).is_err()); + assert!(!group.is_valid()); + 
drop(group); + }); }); } #[test] - pub fn test_close_manual() { - // File::close() should close handles of all related objects + fn test_weak_close() { + use crate::hl::plist::file_access::FileCloseDegree; + with_tmp_path(|path| { + let file = File::with_options() + .with_fapl(|fapl| fapl.fclose_degree(FileCloseDegree::Weak)) + .create(&path) + .unwrap(); + assert_eq!(file.refcount(), 1); + let fileid = file.id(); + + let group = file.create_group("foo").unwrap(); + assert_eq!(file.refcount(), 1); + assert_eq!(group.refcount(), 1); + + let file_copy = group.file().unwrap(); + assert_eq!(group.refcount(), 1); + assert_eq!(file.refcount(), 2); + assert_eq!(file_copy.refcount(), 2); + + drop(file); + assert_eq!(rc(fileid).unwrap(), 1); + assert_eq!(group.refcount(), 1); + assert_eq!(file_copy.refcount(), 1); + + h5lock!({ + // Lock to ensure fileid does not get overwritten + drop(file_copy); + assert!(rc(fileid).is_err()); + }); + assert_eq!(group.refcount(), 1); + }); + } + + #[test] + pub fn test_close_automatic() { + // File going out of scope should just close its own handle with_tmp_path(|path| { let file = File::create(&path).unwrap(); let group = file.create_group("foo").unwrap(); let file_copy = group.file().unwrap(); - file.close(); - assert!(!group.is_valid()); - assert!(!file_copy.is_valid()); - }) + drop(file); + assert!(group.is_valid()); + assert!(file_copy.is_valid()); + }); } #[test] @@ -532,7 +583,7 @@ pub mod tests { FileBuilder::new().with_fapl(|p| p.core_filebacked(false)).create(&path).unwrap(); file.create_group("x").unwrap(); assert!(file.is_valid()); - file.close(); + file.close().unwrap(); assert!(fs::metadata(&path).is_err()); assert_err!( FileBuilder::new().with_fapl(|p| p.core()).open(&path), @@ -548,7 +599,7 @@ pub mod tests { FileBuilder::new().with_fapl(|p| p.core_filebacked(true)).create(&path).unwrap(); assert!(file.is_valid()); file.create_group("bar").unwrap(); - file.close(); + file.close().unwrap(); 
assert!(fs::metadata(&path).is_ok()); File::open(&path).unwrap().group("bar").unwrap(); }) @@ -594,8 +645,8 @@ pub mod tests { let path = dir.join("qwe.h5"); let file = File::create(&path).unwrap(); assert_eq!(format!("{:?}", file), ""); - let root = file.file().unwrap(); - file.close(); + file.close().unwrap(); + let root = File::from_handle(Handle::invalid()); assert_eq!(format!("{:?}", root), ""); let file = File::open(&path).unwrap(); assert_eq!(format!("{:?}", file), ""); diff --git a/src/hl/filters.rs b/src/hl/filters.rs new file mode 100644 index 000000000..207bad997 --- /dev/null +++ b/src/hl/filters.rs @@ -0,0 +1,658 @@ +use std::collections::HashMap; +use std::ptr; + +use hdf5_sys::h5p::{ + H5Pget_filter2, H5Pget_nfilters, H5Pset_deflate, H5Pset_filter, H5Pset_fletcher32, H5Pset_nbit, + H5Pset_scaleoffset, H5Pset_shuffle, H5Pset_szip, +}; +use hdf5_sys::h5t::H5T_class_t; +use hdf5_sys::h5z::{ + H5Zfilter_avail, H5Zget_filter_info, H5Z_FILTER_CONFIG_DECODE_ENABLED, + H5Z_FILTER_CONFIG_ENCODE_ENABLED, H5Z_FILTER_DEFLATE, H5Z_FILTER_FLETCHER32, H5Z_FILTER_NBIT, + H5Z_FILTER_SCALEOFFSET, H5Z_FILTER_SHUFFLE, H5Z_FILTER_SZIP, H5Z_FLAG_OPTIONAL, + H5Z_SO_FLOAT_DSCALE, H5Z_SO_INT, H5_SZIP_EC_OPTION_MASK, H5_SZIP_MAX_PIXELS_PER_BLOCK, + H5_SZIP_NN_OPTION_MASK, +}; + +pub use hdf5_sys::h5z::H5Z_filter_t; + +use crate::internal_prelude::*; + +#[cfg(feature = "blosc")] +mod blosc; +#[cfg(feature = "lzf")] +mod lzf; + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum SZip { + Entropy, + NearestNeighbor, +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ScaleOffset { + Integer(u16), + FloatDScale(u8), +} + +#[cfg(feature = "blosc")] +mod blosc_impl { + #[derive(Clone, Copy, Debug, PartialEq, Eq)] + #[cfg(feature = "blosc")] + pub enum Blosc { + BloscLZ, + LZ4, + LZ4HC, + Snappy, + ZLib, + ZStd, + } + + #[derive(Clone, Copy, Debug, PartialEq, Eq)] + #[cfg(feature = "blosc")] + pub enum BloscShuffle { + None, + Byte, + Bit, + } + + #[cfg(feature = 
"blosc")] + impl Default for BloscShuffle { + fn default() -> Self { + Self::Byte + } + } + + #[cfg(feature = "blosc")] + impl From for BloscShuffle { + fn from(shuffle: bool) -> Self { + if shuffle { + Self::Byte + } else { + Self::None + } + } + } + + #[cfg(feature = "blosc")] + impl Default for Blosc { + fn default() -> Self { + Self::BloscLZ + } + } + + #[cfg(feature = "blosc")] + pub fn blosc_get_nthreads() -> u8 { + h5lock!(super::blosc::blosc_get_nthreads()).max(0).min(255) as _ + } + + #[cfg(feature = "blosc")] + pub fn blosc_set_nthreads(num_threads: u8) -> u8 { + use std::os::raw::c_int; + let nthreads = h5lock!(super::blosc::blosc_set_nthreads(c_int::from(num_threads))); + nthreads.max(0).min(255) as _ + } +} + +#[cfg(feature = "blosc")] +pub use blosc_impl::*; + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum Filter { + Deflate(u8), + Shuffle, + Fletcher32, + SZip(SZip, u8), + NBit, + ScaleOffset(ScaleOffset), + #[cfg(feature = "lzf")] + LZF, + #[cfg(feature = "blosc")] + Blosc(Blosc, u8, BloscShuffle), + User(H5Z_filter_t, Vec), +} + +#[derive(Default, Clone, Copy, Debug, PartialEq, Eq)] +pub struct FilterInfo { + pub is_available: bool, + pub encode_enabled: bool, + pub decode_enabled: bool, +} + +/// This function requires a synchronisation with other calls to `hdf5` +pub(crate) fn register_filters() { + #[cfg(feature = "lzf")] + if let Err(e) = lzf::register_lzf() { + eprintln!("Error while registering LZF filter: {}", e); + } + #[cfg(feature = "blosc")] + if let Err(e) = blosc::register_blosc() { + eprintln!("Error while registering Blosc filter: {}", e); + } +} + +/// Returns `true` if deflate filter is available. +pub fn deflate_available() -> bool { + h5lock!(H5Zfilter_avail(H5Z_FILTER_DEFLATE) == 1) +} + +/// Returns `true` if deflate filter is available. +#[doc(hidden)] +#[deprecated(note = "deprecated; use deflate_available()")] +pub fn gzip_available() -> bool { + deflate_available() +} + +/// Returns `true` if szip filter is available. 
+pub fn szip_available() -> bool { + h5lock!(H5Zfilter_avail(H5Z_FILTER_SZIP) == 1) +} + +/// Returns `true` if LZF filter is available. +pub fn lzf_available() -> bool { + h5lock!(H5Zfilter_avail(32000) == 1) +} + +/// Returns `true` if Blosc filter is available. +pub fn blosc_available() -> bool { + h5lock!(H5Zfilter_avail(32001) == 1) +} + +impl Filter { + pub fn id(&self) -> H5Z_filter_t { + match self { + Self::Deflate(_) => H5Z_FILTER_DEFLATE, + Self::Shuffle => H5Z_FILTER_SHUFFLE, + Self::Fletcher32 => H5Z_FILTER_FLETCHER32, + Self::SZip(_, _) => H5Z_FILTER_SZIP, + Self::NBit => H5Z_FILTER_NBIT, + Self::ScaleOffset(_) => H5Z_FILTER_SCALEOFFSET, + #[cfg(feature = "lzf")] + Self::LZF => lzf::LZF_FILTER_ID, + #[cfg(feature = "blosc")] + Self::Blosc(_, _, _) => blosc::BLOSC_FILTER_ID, + Self::User(id, _) => *id, + } + } + + pub fn get_info(filter_id: H5Z_filter_t) -> FilterInfo { + if !h5call!(H5Zfilter_avail(filter_id)).map(|x| x > 0).unwrap_or_default() { + return FilterInfo::default(); + } + let mut flags: c_uint = 0; + h5lock!(H5Zget_filter_info(filter_id, &mut flags as *mut _)); + FilterInfo { + is_available: true, + encode_enabled: flags & H5Z_FILTER_CONFIG_ENCODE_ENABLED != 0, + decode_enabled: flags & H5Z_FILTER_CONFIG_DECODE_ENABLED != 0, + } + } + + pub fn is_available(&self) -> bool { + Self::get_info(self.id()).is_available + } + + pub fn encode_enabled(&self) -> bool { + Self::get_info(self.id()).encode_enabled + } + + pub fn decode_enabled(&self) -> bool { + Self::get_info(self.id()).decode_enabled + } + + pub fn deflate(level: u8) -> Self { + Self::Deflate(level) + } + + pub fn shuffle() -> Self { + Self::Shuffle + } + + pub fn fletcher32() -> Self { + Self::Fletcher32 + } + + pub fn szip(coding: SZip, px_per_block: u8) -> Self { + Self::SZip(coding, px_per_block) + } + + pub fn nbit() -> Self { + Self::NBit + } + + pub fn scale_offset(mode: ScaleOffset) -> Self { + Self::ScaleOffset(mode) + } + + #[cfg(feature = "lzf")] + pub fn lzf() -> Self { + 
Self::LZF + } + + #[cfg(feature = "blosc")] + pub fn blosc(complib: Blosc, clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::Blosc(complib, clevel, shuffle.into()) + } + + #[cfg(feature = "blosc")] + pub fn blosc_blosclz(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::BloscLZ, clevel, shuffle) + } + + #[cfg(feature = "blosc")] + pub fn blosc_lz4(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::LZ4, clevel, shuffle) + } + + #[cfg(feature = "blosc")] + pub fn blosc_lz4hc(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::LZ4HC, clevel, shuffle) + } + + #[cfg(feature = "blosc")] + pub fn blosc_snappy(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::Snappy, clevel, shuffle) + } + + #[cfg(feature = "blosc")] + pub fn blosc_zlib(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::ZLib, clevel, shuffle) + } + + #[cfg(feature = "blosc")] + pub fn blosc_zstd(clevel: u8, shuffle: T) -> Self + where + T: Into, + { + Self::blosc(Blosc::ZStd, clevel, shuffle) + } + + pub fn user(id: H5Z_filter_t, cdata: &[c_uint]) -> Self { + Self::User(id, cdata.to_vec()) + } + + fn parse_deflate(cdata: &[c_uint]) -> Result { + ensure!(!cdata.is_empty(), "expected cdata.len() >= 1 for deflate filter"); + ensure!(cdata[0] <= 9, "invalid deflate level: {}", cdata[0]); + Ok(Self::deflate(cdata[0] as _)) + } + + fn parse_shuffle(_cdata: &[c_uint]) -> Result { + Ok(Self::shuffle()) + } + + fn parse_fletcher32(_cdata: &[c_uint]) -> Result { + Ok(Self::fletcher32()) + } + + fn parse_nbit(_cdata: &[c_uint]) -> Result { + Ok(Self::nbit()) + } + + fn parse_szip(cdata: &[c_uint]) -> Result { + ensure!(cdata.len() >= 2, "expected cdata.len() >= 2 for szip filter"); + let m = cdata[0]; + ensure!( + (m & H5_SZIP_EC_OPTION_MASK != 0) != (m & H5_SZIP_NN_OPTION_MASK != 0), + "invalid szip mask: {}: expected EC or NN to be set", + m + ); + let szip_coding = + if m & 
H5_SZIP_EC_OPTION_MASK == 0 { SZip::NearestNeighbor } else { SZip::Entropy }; + let px_per_block = cdata[1]; + ensure!( + px_per_block <= H5_SZIP_MAX_PIXELS_PER_BLOCK, + "invalid pixels per block for szip filter: {}", + px_per_block + ); + Ok(Self::szip(szip_coding, px_per_block as _)) + } + + fn parse_scaleoffset(cdata: &[c_uint]) -> Result { + ensure!(cdata.len() >= 2, "expected cdata.len() >= 2 for scaleoffset filter"); + let scale_type = cdata[0]; + let mode = if scale_type == (H5Z_SO_INT as c_uint) { + ensure!( + cdata[1] <= c_uint::from(u16::max_value()), + "invalid int scale-offset: {}", + cdata[1] + ); + ScaleOffset::Integer(cdata[1] as _) + } else if scale_type == (H5Z_SO_FLOAT_DSCALE as c_uint) { + ensure!( + cdata[1] <= c_uint::from(u8::max_value()), + "invalid float scale-offset: {}", + cdata[1] + ); + ScaleOffset::FloatDScale(cdata[1] as _) + } else { + fail!("invalid scale type for scaleoffset filter: {}", cdata[0]) + }; + Ok(Self::scale_offset(mode)) + } + + #[cfg(feature = "lzf")] + fn parse_lzf(_cdata: &[c_uint]) -> Result { + Ok(Self::lzf()) + } + + #[cfg(feature = "blosc")] + fn parse_blosc(cdata: &[c_uint]) -> Result { + ensure!(cdata.len() >= 5, "expected at least length 5 cdata for blosc filter"); + ensure!(cdata.len() <= 7, "expected at most length 7 cdata for blosc filter"); + ensure!(cdata[4] <= 9, "invalid blosc clevel: {}", cdata[4]); + let clevel = cdata[4] as u8; + let shuffle = if cdata.len() >= 6 { + match cdata[5] { + blosc::BLOSC_NOSHUFFLE => BloscShuffle::None, + blosc::BLOSC_SHUFFLE => BloscShuffle::Byte, + blosc::BLOSC_BITSHUFFLE => BloscShuffle::Bit, + _ => fail!("invalid blosc shuffle: {}", cdata[5]), + } + } else { + BloscShuffle::Byte + }; + let complib = if cdata.len() >= 7 { + match cdata[6] { + blosc::BLOSC_BLOSCLZ => Blosc::BloscLZ, + blosc::BLOSC_LZ4 => Blosc::LZ4, + blosc::BLOSC_LZ4HC => Blosc::LZ4HC, + blosc::BLOSC_SNAPPY => Blosc::Snappy, + blosc::BLOSC_ZLIB => Blosc::ZLib, + blosc::BLOSC_ZSTD => Blosc::ZStd, + _ => 
fail!("invalid blosc complib: {}", cdata[6]), + } + } else { + Blosc::BloscLZ + }; + Ok(Self::blosc(complib, clevel, shuffle)) + } + + pub fn from_raw(filter_id: H5Z_filter_t, cdata: &[c_uint]) -> Result { + ensure!(filter_id > 0, "invalid filter id: {}", filter_id); + match filter_id { + H5Z_FILTER_DEFLATE => Self::parse_deflate(cdata), + H5Z_FILTER_SHUFFLE => Self::parse_shuffle(cdata), + H5Z_FILTER_FLETCHER32 => Self::parse_fletcher32(cdata), + H5Z_FILTER_SZIP => Self::parse_szip(cdata), + H5Z_FILTER_NBIT => Self::parse_nbit(cdata), + H5Z_FILTER_SCALEOFFSET => Self::parse_scaleoffset(cdata), + #[cfg(feature = "lzf")] + lzf::LZF_FILTER_ID => Self::parse_lzf(cdata), + #[cfg(feature = "blosc")] + blosc::BLOSC_FILTER_ID => Self::parse_blosc(cdata), + _ => Ok(Self::user(filter_id, cdata)), + } + } + + unsafe fn apply_deflate(plist_id: hid_t, level: u8) -> herr_t { + H5Pset_deflate(plist_id, c_uint::from(level)) + } + + unsafe fn apply_shuffle(plist_id: hid_t) -> herr_t { + H5Pset_shuffle(plist_id) + } + + unsafe fn apply_fletcher32(plist_id: hid_t) -> herr_t { + H5Pset_fletcher32(plist_id) + } + + unsafe fn apply_szip(plist_id: hid_t, coding: SZip, px_per_block: u8) -> herr_t { + let mask = match coding { + SZip::Entropy => H5_SZIP_EC_OPTION_MASK, + SZip::NearestNeighbor => H5_SZIP_NN_OPTION_MASK, + }; + H5Pset_szip(plist_id, mask, c_uint::from(px_per_block)) + } + + unsafe fn apply_nbit(plist_id: hid_t) -> herr_t { + H5Pset_nbit(plist_id) + } + + unsafe fn apply_scaleoffset(plist_id: hid_t, mode: ScaleOffset) -> herr_t { + let (scale_type, factor) = match mode { + ScaleOffset::Integer(bits) => (H5Z_SO_INT, c_int::from(bits)), + ScaleOffset::FloatDScale(factor) => (H5Z_SO_FLOAT_DSCALE, c_int::from(factor)), + }; + H5Pset_scaleoffset(plist_id, scale_type, factor) + } + + #[cfg(feature = "lzf")] + unsafe fn apply_lzf(plist_id: hid_t) -> herr_t { + Self::apply_user(plist_id, lzf::LZF_FILTER_ID, &[]) + } + + #[cfg(feature = "blosc")] + unsafe fn apply_blosc( + plist_id: 
hid_t, complib: Blosc, clevel: u8, shuffle: BloscShuffle, + ) -> herr_t { + let mut cdata: Vec = vec![0; 7]; + cdata[4] = c_uint::from(clevel); + cdata[5] = match shuffle { + BloscShuffle::None => blosc::BLOSC_NOSHUFFLE, + BloscShuffle::Byte => blosc::BLOSC_SHUFFLE, + BloscShuffle::Bit => blosc::BLOSC_BITSHUFFLE, + }; + cdata[6] = match complib { + Blosc::BloscLZ => blosc::BLOSC_BLOSCLZ, + Blosc::LZ4 => blosc::BLOSC_LZ4, + Blosc::LZ4HC => blosc::BLOSC_LZ4HC, + Blosc::Snappy => blosc::BLOSC_SNAPPY, + Blosc::ZLib => blosc::BLOSC_ZLIB, + Blosc::ZStd => blosc::BLOSC_ZSTD, + }; + Self::apply_user(plist_id, blosc::BLOSC_FILTER_ID, &cdata) + } + + unsafe fn apply_user(plist_id: hid_t, filter_id: H5Z_filter_t, cdata: &[c_uint]) -> herr_t { + // We're setting custom filters to optional, same way h5py does it, since + // the only mention of H5Z_FLAG_MANDATORY in the HDF5 source itself is + // in H5Pset_fletcher32() in H5Pocpl.c; for all other purposes than + // verifying checksums optional filter makes more sense than mandatory. 
+ let cd_nelmts = cdata.len() as _; + let cd_values = if cd_nelmts == 0 { ptr::null() } else { cdata.as_ptr() }; + H5Pset_filter(plist_id, filter_id, H5Z_FLAG_OPTIONAL, cd_nelmts, cd_values) + } + + pub(crate) fn apply_to_plist(&self, id: hid_t) -> Result<()> { + h5try!(match self { + Self::Deflate(level) => Self::apply_deflate(id, *level), + Self::Shuffle => Self::apply_shuffle(id), + Self::Fletcher32 => Self::apply_fletcher32(id), + Self::SZip(coding, px_per_block) => Self::apply_szip(id, *coding, *px_per_block), + Self::NBit => Self::apply_nbit(id), + Self::ScaleOffset(mode) => Self::apply_scaleoffset(id, *mode), + #[cfg(feature = "lzf")] + Self::LZF => Self::apply_lzf(id), + #[cfg(feature = "blosc")] + Self::Blosc(complib, clevel, shuffle) => { + Self::apply_blosc(id, *complib, *clevel, *shuffle) + } + Self::User(filter_id, ref cdata) => Self::apply_user(id, *filter_id, cdata), + }); + Ok(()) + } + + pub(crate) fn extract_pipeline(plist_id: hid_t) -> Result> { + let mut filters = Vec::new(); + let mut name: Vec = vec![0; 257]; + let mut cd_values: Vec = vec![0; 32]; + h5lock!({ + let n_filters = h5try!(H5Pget_nfilters(plist_id)); + for idx in 0..n_filters { + let mut flags: c_uint = 0; + let mut cd_nelmts: size_t = cd_values.len() as _; + let filter_id = h5try!(H5Pget_filter2( + plist_id, + idx as _, + &mut flags as *mut _, + &mut cd_nelmts as *mut _, + cd_values.as_mut_ptr(), + name.len() as _, + name.as_mut_ptr(), + ptr::null_mut(), + )); + let cdata = &cd_values[..(cd_nelmts as _)]; + let flt = Self::from_raw(filter_id, cdata)?; + filters.push(flt); + } + Ok(filters) + }) + } +} + +pub(crate) fn validate_filters(filters: &[Filter], type_class: H5T_class_t) -> Result<()> { + const COMP_FILTER_IDS: &[H5Z_filter_t] = &[H5Z_FILTER_DEFLATE, H5Z_FILTER_SZIP, 32000, 32001]; + + let mut map: HashMap = HashMap::new(); + let mut comp_filter: Option<&Filter> = None; + + for filter in filters { + ensure!(filter.is_available(), "Filter not available: {:?}", filter); + + 
let id = filter.id(); + + if let Some(f) = map.get(&id) { + fail!("Duplicate filters: {:?} and {:?}", f, filter); + } else if COMP_FILTER_IDS.contains(&id) { + if let Some(comp_filter) = comp_filter { + fail!("Multiple compression filters: {:?} and {:?}", comp_filter, filter); + } + comp_filter = Some(filter); + } else if id == H5Z_FILTER_FLETCHER32 && map.contains_key(&H5Z_FILTER_SCALEOFFSET) { + fail!("Lossy scale-offset filter before fletcher2 checksum filter"); + } else if let Filter::ScaleOffset(mode) = filter { + match type_class { + H5T_class_t::H5T_INTEGER | H5T_class_t::H5T_ENUM => { + if let ScaleOffset::FloatDScale(_) = mode { + fail!("Invalid scale-offset mode for integer type: {:?}", mode); + } + } + H5T_class_t::H5T_FLOAT => { + if let ScaleOffset::Integer(_) = mode { + fail!("Invalid scale-offset mode for float type: {:?}", mode); + } + } + _ => fail!("Can only use scale-offset with ints/floats, got: {:?}", type_class), + } + } else if let Filter::SZip(_, _) = filter { + // https://github.com/h5py/h5py/issues/953 + if map.contains_key(&H5Z_FILTER_FLETCHER32) { + fail!("Fletcher32 filter must be placed after szip filter"); + } + } else if let Filter::Shuffle = filter { + if let Some(comp_filter) = comp_filter { + fail!("Shuffle filter placed after compression filter: {:?}", comp_filter); + } + } + map.insert(id, filter); + } + + Ok(()) +} + +#[cfg(test)] +mod tests { + use hdf5_sys::h5t::H5T_class_t; + + use super::{ + blosc_available, deflate_available, lzf_available, szip_available, validate_filters, + Filter, FilterInfo, SZip, ScaleOffset, + }; + use crate::test::with_tmp_file; + use crate::{plist::DatasetCreate, Result}; + + #[test] + fn test_filter_pipeline() -> Result<()> { + let mut comp_filters = vec![]; + if deflate_available() { + comp_filters.push(Filter::deflate(3)); + } + if szip_available() { + comp_filters.push(Filter::szip(SZip::Entropy, 8)); + } + assert_eq!(cfg!(feature = "lzf"), lzf_available()); + #[cfg(feature = "lzf")] + { + 
comp_filters.push(Filter::lzf()); + } + assert_eq!(cfg!(feature = "blosc"), blosc_available()); + #[cfg(feature = "blosc")] + { + use super::BloscShuffle; + comp_filters.push(Filter::blosc_blosclz(1, false)); + comp_filters.push(Filter::blosc_lz4(3, true)); + comp_filters.push(Filter::blosc_lz4hc(5, BloscShuffle::Bit)); + comp_filters.push(Filter::blosc_zlib(7, BloscShuffle::None)); + comp_filters.push(Filter::blosc_zstd(9, BloscShuffle::Byte)); + comp_filters.push(Filter::blosc_snappy(0, BloscShuffle::Bit)); + } + for c in &comp_filters { + assert!(c.is_available()); + assert!(c.encode_enabled()); + assert!(c.decode_enabled()); + + let pipeline = vec![ + Filter::nbit(), + Filter::shuffle(), + c.clone(), + Filter::fletcher32(), + Filter::scale_offset(ScaleOffset::Integer(3)), + ]; + validate_filters(&pipeline, H5T_class_t::H5T_INTEGER)?; + + let plist = DatasetCreate::try_new()?; + for flt in &pipeline { + flt.apply_to_plist(plist.id())?; + } + assert_eq!(Filter::extract_pipeline(plist.id())?, pipeline); + + let mut b = DatasetCreate::build(); + b.set_filters(&pipeline); + let plist = b.finish()?; + assert_eq!(Filter::extract_pipeline(plist.id())?, pipeline); + + let res = with_tmp_file(|file| { + file.new_dataset_builder() + .empty::() + .shape((10_000, 20)) + .with_dcpl(|p| p.set_filters(&pipeline)) + .create("foo") + .unwrap(); + let plist = file.dataset("foo").unwrap().dcpl().unwrap(); + Filter::extract_pipeline(plist.id()).unwrap() + }); + assert_eq!(res, pipeline); + } + + let bad_filter = Filter::user(12_345, &[1, 2, 3, 4, 5]); + assert_eq!(Filter::get_info(bad_filter.id()), FilterInfo::default()); + assert!(!bad_filter.is_available()); + assert!(!bad_filter.encode_enabled()); + assert!(!bad_filter.decode_enabled()); + assert_err!( + validate_filters(&[bad_filter], H5T_class_t::H5T_INTEGER), + "Filter not available" + ); + + Ok(()) + } +} diff --git a/src/hl/filters/blosc.rs b/src/hl/filters/blosc.rs new file mode 100644 index 000000000..5d307064d --- 
/dev/null +++ b/src/hl/filters/blosc.rs @@ -0,0 +1,244 @@ +use std::ptr; +use std::slice; + +use lazy_static::lazy_static; + +use hdf5_sys::h5p::{H5Pget_chunk, H5Pget_filter_by_id2, H5Pmodify_filter}; +use hdf5_sys::h5t::{H5Tclose, H5Tget_class, H5Tget_size, H5Tget_super, H5T_ARRAY}; +use hdf5_sys::h5z::{H5Z_class2_t, H5Z_filter_t, H5Zregister, H5Z_CLASS_T_VERS, H5Z_FLAG_REVERSE}; + +use crate::globals::{H5E_CALLBACK, H5E_PLIST}; +use crate::internal_prelude::*; + +pub use blosc_sys::{ + BLOSC_BITSHUFFLE, BLOSC_BLOSCLZ, BLOSC_LZ4, BLOSC_LZ4HC, BLOSC_MAX_TYPESIZE, BLOSC_NOSHUFFLE, + BLOSC_SHUFFLE, BLOSC_SNAPPY, BLOSC_VERSION_FORMAT, BLOSC_ZLIB, BLOSC_ZSTD, +}; + +pub use blosc_sys::{ + blosc_cbuffer_sizes, blosc_compcode_to_compname, blosc_compress, blosc_decompress, + blosc_get_nthreads, blosc_get_version_string, blosc_init, blosc_list_compressors, + blosc_set_compressor, blosc_set_nthreads, +}; + +const BLOSC_FILTER_NAME: &[u8] = b"blosc\0"; +pub const BLOSC_FILTER_ID: H5Z_filter_t = 32001; +const BLOSC_FILTER_VERSION: c_uint = 2; + +const BLOSC_FILTER_INFO: H5Z_class2_t = H5Z_class2_t { + version: H5Z_CLASS_T_VERS as _, + id: BLOSC_FILTER_ID, + encoder_present: 1, + decoder_present: 1, + name: BLOSC_FILTER_NAME.as_ptr().cast(), + can_apply: None, + set_local: Some(set_local_blosc), + filter: Some(filter_blosc), +}; + +lazy_static! 
{ + static ref BLOSC_INIT: Result<(), &'static str> = { + unsafe { + blosc_init(); + } + let ret = unsafe { H5Zregister((&BLOSC_FILTER_INFO as *const H5Z_class2_t).cast()) }; + if H5ErrorCode::is_err_code(ret) { + return Err("Can't register Blosc filter"); + } + Ok(()) + }; +} + +pub fn register_blosc() -> Result<(), &'static str> { + (*BLOSC_INIT).clone() +} + +extern "C" fn set_local_blosc(dcpl_id: hid_t, type_id: hid_t, _space_id: hid_t) -> herr_t { + const MAX_NDIMS: usize = 32; + let mut flags: c_uint = 0; + let mut nelmts: size_t = 8; + let mut values: Vec = vec![0; 8]; + let ret = unsafe { + H5Pget_filter_by_id2( + dcpl_id, + BLOSC_FILTER_ID, + &mut flags as *mut _, + &mut nelmts as *mut _, + values.as_mut_ptr(), + 0, + ptr::null_mut(), + ptr::null_mut(), + ) + }; + if ret < 0 { + return -1; + } + nelmts = nelmts.max(4); + values[0] = BLOSC_FILTER_VERSION; + values[1] = BLOSC_VERSION_FORMAT; + let mut chunkdims: Vec = vec![0; MAX_NDIMS]; + let ndims: c_int = unsafe { H5Pget_chunk(dcpl_id, MAX_NDIMS as _, chunkdims.as_mut_ptr()) }; + if ndims < 0 { + return -1; + } + if ndims > MAX_NDIMS as _ { + h5err!("Chunk rank exceeds limit", H5E_PLIST, H5E_CALLBACK); + return -1; + } + let typesize: size_t = unsafe { H5Tget_size(type_id) }; + if typesize == 0 { + return -1; + } + let mut basetypesize = typesize; + unsafe { + if H5Tget_class(type_id) == H5T_ARRAY { + let super_type = H5Tget_super(type_id); + basetypesize = H5Tget_size(super_type); + H5Tclose(super_type); + } + } + if basetypesize > BLOSC_MAX_TYPESIZE as _ { + basetypesize = 1; + } + values[2] = basetypesize as _; + let mut bufsize = typesize; + for &chunkdim in chunkdims[..ndims as usize].iter() { + bufsize *= chunkdim as size_t; + } + values[3] = bufsize as _; + let r = unsafe { H5Pmodify_filter(dcpl_id, BLOSC_FILTER_ID, flags, nelmts, values.as_ptr()) }; + if r < 0 { + -1 + } else { + 1 + } +} + +struct BloscConfig { + pub typesize: size_t, + pub outbuf_size: size_t, + pub clevel: c_int, + pub 
doshuffle: c_int, + pub compname: *const c_char, +} + +impl Default for BloscConfig { + fn default() -> Self { + const DEFAULT_COMPNAME: &[u8] = b"blosclz\0"; + Self { + typesize: 0, + outbuf_size: 0, + clevel: 5, + doshuffle: 1, + compname: DEFAULT_COMPNAME.as_ptr().cast(), + } + } +} + +fn parse_blosc_cdata(cd_nelmts: size_t, cd_values: *const c_uint) -> Option { + let cdata = unsafe { slice::from_raw_parts(cd_values, cd_nelmts as _) }; + let mut cfg = BloscConfig { + typesize: cdata[2] as _, + outbuf_size: cdata[3] as _, + ..BloscConfig::default() + }; + if cdata.len() >= 5 { + cfg.clevel = cdata[4] as _; + }; + if cdata.len() >= 6 { + let v = unsafe { slice::from_raw_parts(blosc_get_version_string() as *mut u8, 4) }; + if v[0] <= b'1' && v[1] == b'.' && v[2] < b'8' && v[3] == b'.' { + h5err!( + "This Blosc library version is not supported. Please update to >= 1.8", + H5E_PLIST, + H5E_CALLBACK + ); + return None; + } + cfg.doshuffle = cdata[5] as _; + } + if cdata.len() >= 7 { + let r = unsafe { blosc_compcode_to_compname(cdata[6] as _, &mut cfg.compname as *mut _) }; + if r == -1 { + let complist = string_from_cstr(unsafe { blosc_list_compressors() }); + let errmsg = format!( + concat!( + "This Blosc library does not have support for the '{}' compressor, ", + "but only for: {}" + ), + string_from_cstr(cfg.compname), + complist + ); + h5err!(errmsg, H5E_PLIST, H5E_CALLBACK); + return None; + } + } + Some(cfg) +} + +extern "C" fn filter_blosc( + flags: c_uint, cd_nelmts: size_t, cd_values: *const c_uint, nbytes: size_t, + buf_size: *mut size_t, buf: *mut *mut c_void, +) -> size_t { + let cfg = if let Some(cfg) = parse_blosc_cdata(cd_nelmts, cd_values) { + cfg + } else { + return 0; + }; + if flags & H5Z_FLAG_REVERSE == 0 { + unsafe { filter_blosc_compress(&cfg, nbytes, buf_size, buf) } + } else { + unsafe { filter_blosc_decompress(&cfg, buf_size, buf) } + } +} + +unsafe fn filter_blosc_compress( + cfg: &BloscConfig, nbytes: size_t, buf_size: *mut size_t, buf: 
*mut *mut c_void, +) -> size_t { + let outbuf_size = *buf_size; + let outbuf = libc::malloc(outbuf_size); + if outbuf.is_null() { + h5err!("Can't allocate compression buffer", H5E_PLIST, H5E_CALLBACK); + return 0; + } + blosc_set_compressor(cfg.compname); + let status = + blosc_compress(cfg.clevel, cfg.doshuffle, cfg.typesize, nbytes, *buf, outbuf, nbytes); + if status > 0 { + libc::free(*buf); + *buf = outbuf; + status as _ + } else { + libc::free(outbuf); + 0 + } +} + +unsafe fn filter_blosc_decompress( + cfg: &BloscConfig, buf_size: *mut size_t, buf: *mut *mut c_void, +) -> size_t { + let mut outbuf_size: size_t = cfg.outbuf_size; + let (mut cbytes, mut blocksize): (size_t, size_t) = (0, 0); + blosc_cbuffer_sizes( + *buf, + &mut outbuf_size as *mut _, + &mut cbytes as *mut _, + &mut blocksize as *mut _, + ); + let outbuf = libc::malloc(outbuf_size); + if outbuf.is_null() { + h5err!("Can't allocate decompression buffer", H5E_PLIST, H5E_CALLBACK); + return 0; + } + let status = blosc_decompress(*buf, outbuf, outbuf_size); + if status > 0 { + libc::free(*buf); + *buf = outbuf; + *buf_size = outbuf_size as _; + status as _ + } else { + libc::free(outbuf); + h5err!("Blosc decompression error", H5E_PLIST, H5E_CALLBACK); + 0 + } +} diff --git a/src/hl/filters/lzf.rs b/src/hl/filters/lzf.rs new file mode 100644 index 000000000..2ca6823f9 --- /dev/null +++ b/src/hl/filters/lzf.rs @@ -0,0 +1,159 @@ +use std::ptr; +use std::slice; + +use lazy_static::lazy_static; + +use hdf5_sys::h5p::{H5Pget_chunk, H5Pget_filter_by_id2, H5Pmodify_filter}; +use hdf5_sys::h5t::H5Tget_size; +use hdf5_sys::h5z::{H5Z_class2_t, H5Z_filter_t, H5Zregister, H5Z_CLASS_T_VERS, H5Z_FLAG_REVERSE}; + +use crate::globals::{H5E_CALLBACK, H5E_PLIST}; +use crate::internal_prelude::*; +use lzf_sys::{lzf_compress, lzf_decompress, LZF_VERSION}; + +const LZF_FILTER_NAME: &[u8] = b"lzf\0"; +pub const LZF_FILTER_ID: H5Z_filter_t = 32000; +const LZF_FILTER_VERSION: c_uint = 4; + +const LZF_FILTER_INFO: 
H5Z_class2_t = H5Z_class2_t { + version: H5Z_CLASS_T_VERS as _, + id: LZF_FILTER_ID, + encoder_present: 1, + decoder_present: 1, + name: LZF_FILTER_NAME.as_ptr().cast(), + can_apply: None, + set_local: Some(set_local_lzf), + filter: Some(filter_lzf), +}; + +lazy_static! { + static ref LZF_INIT: Result<(), &'static str> = { + let ret = unsafe { H5Zregister((&LZF_FILTER_INFO as *const H5Z_class2_t).cast()) }; + if H5ErrorCode::is_err_code(ret) { + return Err("Can't register LZF filter"); + } + Ok(()) + }; +} + +pub fn register_lzf() -> Result<(), &'static str> { + (*LZF_INIT).clone() +} + +extern "C" fn set_local_lzf(dcpl_id: hid_t, type_id: hid_t, _space_id: hid_t) -> herr_t { + const MAX_NDIMS: usize = 32; + let mut flags: c_uint = 0; + let mut nelmts: size_t = 0; + let mut values: Vec = vec![0; 8]; + let ret = unsafe { + H5Pget_filter_by_id2( + dcpl_id, + LZF_FILTER_ID, + &mut flags as *mut _, + &mut nelmts as *mut _, + values.as_mut_ptr(), + 0, + ptr::null_mut(), + ptr::null_mut(), + ) + }; + if ret < 0 { + return -1; + } + nelmts = nelmts.max(3); + if values[0] == 0 { + values[0] = LZF_FILTER_VERSION; + } + if values[1] == 0 { + values[1] = LZF_VERSION; + } + let mut chunkdims: Vec = vec![0; MAX_NDIMS]; + let ndims: c_int = unsafe { H5Pget_chunk(dcpl_id, MAX_NDIMS as _, chunkdims.as_mut_ptr()) }; + if ndims < 0 { + return -1; + } + if ndims > MAX_NDIMS as _ { + h5err!("Chunk rank exceeds limit", H5E_PLIST, H5E_CALLBACK); + return -1; + } + let mut bufsize: size_t = unsafe { H5Tget_size(type_id) }; + if bufsize == 0 { + return -1; + } + for &chunkdim in chunkdims[..(ndims as usize)].iter() { + bufsize *= chunkdim as size_t; + } + values[2] = bufsize as _; + let r = unsafe { H5Pmodify_filter(dcpl_id, LZF_FILTER_ID, flags, nelmts, values.as_ptr()) }; + if r < 0 { + -1 + } else { + 1 + } +} + +extern "C" fn filter_lzf( + flags: c_uint, cd_nelmts: size_t, cd_values: *const c_uint, nbytes: size_t, + buf_size: *mut size_t, buf: *mut *mut c_void, +) -> size_t { + if 
flags & H5Z_FLAG_REVERSE == 0 { + unsafe { filter_lzf_compress(nbytes, buf_size, buf) } + } else { + unsafe { filter_lzf_decompress(cd_nelmts, cd_values, nbytes, buf_size, buf) } + } +} + +unsafe fn filter_lzf_compress( + nbytes: size_t, buf_size: *mut size_t, buf: *mut *mut c_void, +) -> size_t { + let outbuf_size = *buf_size; + let outbuf = libc::malloc(outbuf_size); + if outbuf.is_null() { + h5err!("Can't allocate compression buffer", H5E_PLIST, H5E_CALLBACK); + return 0; + } + let status = lzf_compress(*buf, nbytes as _, outbuf, outbuf_size as _); + if status == 0 { + libc::free(outbuf); + } else { + libc::free(*buf); + *buf = outbuf; + } + status as _ +} + +unsafe fn filter_lzf_decompress( + cd_nelmts: size_t, cd_values: *const c_uint, nbytes: size_t, buf_size: *mut size_t, + buf: *mut *mut c_void, +) -> size_t { + let cdata = slice::from_raw_parts(cd_values, cd_nelmts as _); + let mut outbuf_size = if cd_nelmts >= 3 && cdata[2] != 0 { cdata[2] as _ } else { *buf_size }; + let mut outbuf: *mut c_void; + let mut status: c_uint; + loop { + outbuf = libc::malloc(outbuf_size); + if outbuf.is_null() { + h5err!("Can't allocate decompression buffer", H5E_PLIST, H5E_CALLBACK); + return 0; + } + status = lzf_decompress(*buf, nbytes as _, outbuf, outbuf_size as _); + if status != 0 { + break; + } + libc::free(outbuf); + let e = errno::errno().0; + if e == 7 { + outbuf_size += *buf_size; + continue; + } else if e == 22 { + h5err!("Invalid data for LZF decompression", H5E_PLIST, H5E_CALLBACK); + } else { + h5err!("Unknown LZF decompression error", H5E_PLIST, H5E_CALLBACK); + } + return 0; + } + libc::free(*buf); + *buf = outbuf; + *buf_size = outbuf_size as _; + status as _ +} diff --git a/src/hl/group.rs b/src/hl/group.rs index 410eedd95..d5a0166d1 100644 --- a/src/hl/group.rs +++ b/src/hl/group.rs @@ -7,14 +7,16 @@ use hdf5_sys::{ h5d::H5Dopen2, h5g::{H5G_info_t, H5Gcreate2, H5Gget_info, H5Gopen2}, h5l::{ - H5L_info_t, H5L_iterate_t, H5Lcreate_hard, H5Lcreate_soft, 
H5Ldelete, H5Lexists, - H5Literate, H5Lmove, H5L_SAME_LOC, + H5L_info_t, H5L_iterate_t, H5L_type_t, H5Lcreate_external, H5Lcreate_hard, H5Lcreate_soft, + H5Ldelete, H5Lexists, H5Literate, H5Lmove, H5L_SAME_LOC, }, h5p::{H5Pcreate, H5Pset_create_intermediate_group}, + h5t::H5T_cset_t, }; use crate::globals::H5P_LINK_CREATE; use crate::internal_prelude::*; +use crate::{Location, LocationType}; /// Represents the HDF5 group object. #[repr(transparent)] @@ -102,34 +104,69 @@ impl Group { Self::from_id(h5try!(H5Gopen2(self.id(), name.as_ptr(), H5P_DEFAULT))) } - /// Creates a soft link. Note: `src` and `dst` are relative to the current object. - pub fn link_soft(&self, src: &str, dst: &str) -> Result<()> { + /// Creates a soft link. + /// + /// A soft link does not require the linked object to exist. + /// Note: `target` and `link_name` are relative to the current object. + pub fn link_soft(&self, target: &str, link_name: &str) -> Result<()> { // TODO: &mut self? h5lock!({ let lcpl = make_lcpl()?; - let src = to_cstring(src)?; - let dst = to_cstring(dst)?; - h5call!(H5Lcreate_soft(src.as_ptr(), self.id(), dst.as_ptr(), lcpl.id(), H5P_DEFAULT)) - .and(Ok(())) + let target = to_cstring(target)?; + let link_name = to_cstring(link_name)?; + h5call!(H5Lcreate_soft( + target.as_ptr(), + self.id(), + link_name.as_ptr(), + lcpl.id(), + H5P_DEFAULT + )) + .and(Ok(())) }) } - /// Creates a hard link. Note: `src` and `dst` are relative to the current object. - pub fn link_hard(&self, src: &str, dst: &str) -> Result<()> { + /// Creates a hard link. Note: `target` and `link_name` are relative to the current object. + pub fn link_hard(&self, target: &str, link_name: &str) -> Result<()> { // TODO: &mut self? 
- let src = to_cstring(src)?; - let dst = to_cstring(dst)?; + let target = to_cstring(target)?; + let link_name = to_cstring(link_name)?; h5call!(H5Lcreate_hard( self.id(), - src.as_ptr(), + target.as_ptr(), H5L_SAME_LOC, - dst.as_ptr(), + link_name.as_ptr(), H5P_DEFAULT, H5P_DEFAULT )) .and(Ok(())) } + /// Creates an external link. + /// + /// Note: `link_name` is relative to the current object, + /// `target` is relative to the root of the source file, + /// `target_file_name` is the path to the external file. + /// + /// For a detailed explanation on how `target_file_name` is resolved, see + /// [https://portal.hdfgroup.org/display/HDF5/H5L_CREATE_EXTERNAL](https://portal.hdfgroup.org/display/HDF5/H5L_CREATE_EXTERNAL) + pub fn link_external( + &self, target_file_name: &str, target: &str, link_name: &str, + ) -> Result<()> { + // TODO: &mut self? + let target = to_cstring(target)?; + let target_file_name = to_cstring(target_file_name)?; + let link_name = to_cstring(link_name)?; + h5call!(H5Lcreate_external( + target_file_name.as_ptr(), + target.as_ptr(), + self.id(), + link_name.as_ptr(), + H5P_DEFAULT, + H5P_DEFAULT, + )) + .and(Ok(())) + } + /// Relinks an object. Note: `name` and `path` are relative to the current object. pub fn relink(&self, name: &str, path: &str) -> Result<()> { // TODO: &mut self? @@ -162,9 +199,14 @@ impl Group { .unwrap_or(false) } + /// Instantiates a new typed dataset builder. + pub fn new_dataset(&self) -> DatasetBuilderEmpty { + self.new_dataset_builder().empty::() + } + /// Instantiates a new dataset builder. - pub fn new_dataset(&self) -> DatasetBuilder { - DatasetBuilder::::new(self) + pub fn new_dataset_builder(&self) -> DatasetBuilder { + DatasetBuilder::new(self) } /// Opens an existing dataset in the file or group. @@ -172,37 +214,192 @@ impl Group { let name = to_cstring(name)?; Dataset::from_id(h5try!(H5Dopen2(self.id(), name.as_ptr(), H5P_DEFAULT))) } +} - /// Returns names of all the members in the group, non-recursively. 
- pub fn member_names(&self) -> Result> { - extern "C" fn members_callback( - _id: hid_t, name: *const c_char, _info: *const H5L_info_t, op_data: *mut c_void, - ) -> herr_t { - panic::catch_unwind(|| { - let other_data: &mut Vec = unsafe { &mut *(op_data as *mut Vec) }; +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum TraversalOrder { + Name, + Creation, +} + +impl Default for TraversalOrder { + fn default() -> Self { + Self::Name + } +} + +impl From for H5_index_t { + fn from(v: TraversalOrder) -> Self { + match v { + TraversalOrder::Name => Self::H5_INDEX_NAME, + TraversalOrder::Creation => Self::H5_INDEX_CRT_ORDER, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum IterationOrder { + Increasing, + Decreasing, + Native, +} + +impl Default for IterationOrder { + fn default() -> Self { + Self::Native + } +} + +impl From for H5_iter_order_t { + fn from(v: IterationOrder) -> Self { + match v { + IterationOrder::Increasing => Self::H5_ITER_INC, + IterationOrder::Decreasing => Self::H5_ITER_DEC, + IterationOrder::Native => Self::H5_ITER_NATIVE, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum LinkType { + Hard, + Soft, + External, +} + +impl From for LinkType { + fn from(link_type: H5L_type_t) -> Self { + match link_type { + H5L_type_t::H5L_TYPE_HARD => Self::Hard, + H5L_type_t::H5L_TYPE_SOFT => Self::Soft, + _ => Self::External, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct LinkInfo { + pub link_type: LinkType, + pub creation_order: Option, + pub is_utf8: bool, +} - other_data.push(string_from_cstr(name)); +impl From<&H5L_info_t> for LinkInfo { + fn from(link: &H5L_info_t) -> Self { + let link_type = link.type_.into(); + let creation_order = if link.corder_valid == 1 { Some(link.corder) } else { None }; + let is_utf8 = link.cset == H5T_cset_t::H5T_CSET_UTF8; + Self { link_type, creation_order, is_utf8 } + } +} - 0 // Continue iteration +/// Iteration methods +impl Group { + /// Visits all objects in 
the group + pub fn iter_visit( + &self, iteration_order: IterationOrder, traversal_order: TraversalOrder, mut val: G, + mut op: F, + ) -> Result + where + F: Fn(&Self, &str, LinkInfo, &mut G) -> bool, + { + /// Struct used to pass a tuple + struct Vtable<'a, F, D> { + f: &'a mut F, + d: &'a mut D, + } + // Maps a closure to a C callback + // + // This function will be called multiple times, but never concurrently + extern "C" fn callback( + id: hid_t, name: *const c_char, info: *const H5L_info_t, op_data: *mut c_void, + ) -> herr_t + where + F: FnMut(&Group, &str, LinkInfo, &mut G) -> bool, + { + panic::catch_unwind(|| { + let vtable = op_data.cast::>(); + let vtable = unsafe { vtable.as_mut().expect("iter_visit: null op_data ptr") }; + unsafe { name.as_ref().expect("iter_visit: null name ptr") }; + let name = unsafe { std::ffi::CStr::from_ptr(name) }; + let info = unsafe { info.as_ref().expect("iter_vist: null info ptr") }; + let handle = Handle::try_borrow(id).expect("iter_visit: unable to create a handle"); + let group = Group::from_handle(handle); + if (vtable.f)(&group, name.to_string_lossy().as_ref(), info.into(), vtable.d) { + 0 + } else { + 1 + } }) .unwrap_or(-1) } - let callback_fn: H5L_iterate_t = Some(members_callback); - let iteration_position: *mut hsize_t = &mut { 0 as u64 }; - let mut result: Vec = Vec::new(); - let other_data: *mut c_void = &mut result as *mut _ as *mut c_void; + let callback_fn: H5L_iterate_t = Some(callback::); + let iter_pos: *mut hsize_t = &mut 0_u64; + + // Store our references on the heap + let mut vtable = Vtable { f: &mut op, d: &mut val }; + let other_data = (&mut vtable as *mut Vtable<_, _>).cast::(); h5call!(H5Literate( self.id(), - H5_index_t::H5_INDEX_NAME, - H5_iter_order_t::H5_ITER_INC, - iteration_position, + traversal_order.into(), + iteration_order.into(), + iter_pos, callback_fn, other_data - ))?; + )) + .map(|_| val) + } - Ok(result) + /// Visits all objects in the group using default iteration/traversal order. 
+ pub fn iter_visit_default(&self, val: G, op: F) -> Result + where + F: Fn(&Self, &str, LinkInfo, &mut G) -> bool, + { + self.iter_visit(IterationOrder::default(), TraversalOrder::default(), val, op) + } + + fn get_all_of_type(&self, loc_type: LocationType) -> Result> { + self.iter_visit_default(vec![], |group, name, _info, objects| { + if let Ok(info) = group.loc_info_by_name(name) { + if info.loc_type == loc_type { + if let Ok(loc) = group.open_by_token(info.token) { + objects.push(loc); + return true; // ok, object extracted and pushed + } + } else { + return true; // ok, object is of another type, skipped + } + } + false // an error occured somewhere along the way + }) + } + + /// Returns all groups in the group, non-recursively + pub fn groups(&self) -> Result> { + self.get_all_of_type(LocationType::Group) + .map(|vec| vec.into_iter().map(|obj| unsafe { obj.cast_unchecked() }).collect()) + } + + /// Returns all datasets in the group, non-recursively + pub fn datasets(&self) -> Result> { + self.get_all_of_type(LocationType::Dataset) + .map(|vec| vec.into_iter().map(|obj| unsafe { obj.cast_unchecked() }).collect()) + } + + /// Returns all named types in the group, non-recursively + pub fn named_datatypes(&self) -> Result> { + self.get_all_of_type(LocationType::NamedDatatype) + .map(|vec| vec.into_iter().map(|obj| unsafe { obj.cast_unchecked() }).collect()) + } + + /// Returns the names of all objects in the group, non-recursively. 
+ pub fn member_names(&self) -> Result> { + self.iter_visit_default(vec![], |_, name, _, names| { + names.push(name.to_owned()); + true + }) } } @@ -212,7 +409,12 @@ pub mod tests { #[test] pub fn test_debug() { - with_tmp_file(|file| { + use crate::hl::plist::file_access::FileCloseDegree; + with_tmp_path(|path| { + let file = File::with_options() + .with_fapl(|fapl| fapl.fclose_degree(FileCloseDegree::Strong)) + .create(&path) + .unwrap(); file.create_group("a/b/c").unwrap(); file.create_group("/a/d").unwrap(); let a = file.group("a").unwrap(); @@ -221,15 +423,23 @@ pub mod tests { assert_eq!(format!("{:?}", a), ""); assert_eq!(format!("{:?}", ab), ""); assert_eq!(format!("{:?}", abc), ""); - file.close(); - assert_eq!(format!("{:?}", a), ""); + h5lock!({ + file.close().unwrap(); + assert_eq!(format!("{:?}", a), ""); + drop(a); + drop(ab); + drop(abc); + }) }) } #[test] pub fn test_group() { with_tmp_file(|file| { - assert_err_re!(file.group("a"), "unable to open group: object.+doesn't exist"); + assert_err_re!( + file.group("a"), + "unable to (?:synchronously )?open group: object.+doesn't exist" + ); file.create_group("a").unwrap(); let a = file.group("a").unwrap(); assert_eq!(a.name(), "/a"); @@ -290,11 +500,11 @@ pub mod tests { file.group("/foo/hard/inner").unwrap(); assert_err_re!( file.link_hard("foo/test", "/foo/test/inner"), - "unable to create (?:hard )?link: name already exists" + "unable to (?:synchronously )?create (?:hard )?link: name already exists" ); assert_err_re!( file.link_hard("foo/bar", "/foo/baz"), - "unable to create (?:hard )?link: object.+doesn't exist" + "unable to (?:synchronously )?create (?:hard )?link: object.+doesn't exist" ); file.relink("/foo/hard", "/foo/hard2").unwrap(); file.group("/foo/hard2/inner").unwrap(); @@ -302,10 +512,13 @@ pub mod tests { file.group("/foo/baz/inner").unwrap(); file.group("/foo/hard2/inner").unwrap(); file.unlink("/foo/baz").unwrap(); - assert_err!(file.group("/foo/baz"), "unable to open group"); + 
assert_err_re!(file.group("/foo/baz"), "unable to (?:synchronously )?open group"); file.group("/foo/hard2/inner").unwrap(); file.unlink("/foo/hard2").unwrap(); - assert_err!(file.group("/foo/hard2/inner"), "unable to open group"); + assert_err_re!( + file.group("/foo/hard2/inner"), + "unable to (?:synchronously )?open group" + ); }) } @@ -318,14 +531,14 @@ pub mod tests { file.relink("/a/soft", "/a/soft2").unwrap(); file.group("/a/soft2/c").unwrap(); file.relink("a/b", "/a/d").unwrap(); - assert_err!(file.group("/a/soft2/c"), "unable to open group"); + assert_err_re!(file.group("/a/soft2/c"), "unable to (?:synchronously )?open group"); file.link_soft("/a/bar", "/a/baz").unwrap(); - assert_err!(file.group("/a/baz"), "unable to open group"); + assert_err_re!(file.group("/a/baz"), "unable to (?:synchronously )?open group"); file.create_group("/a/bar").unwrap(); file.group("/a/baz").unwrap(); file.unlink("/a/bar").unwrap(); - assert_err!(file.group("/a/bar"), "unable to open group"); - assert_err!(file.group("/a/baz"), "unable to open group"); + assert_err_re!(file.group("/a/bar"), "unable to (?:synchronously )?open group"); + assert_err_re!(file.group("/a/baz"), "unable to (?:synchronously )?open group"); }) } @@ -346,9 +559,9 @@ pub mod tests { assert!(group.link_exists("c")); assert!(!group.link_exists("a")); assert!(!group.link_exists("soft")); - #[cfg(not(hdf5_1_10_0))] + #[cfg(not(feature = "1.10.0"))] assert!(!group.link_exists("/")); - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] assert!(group.link_exists("/")); }) } @@ -366,7 +579,10 @@ pub mod tests { assert_err!(file.relink("bar", "/baz"), "unable to move link: name doesn't exist"); file.relink("test", "/foo/test").unwrap(); file.group("/foo/test").unwrap(); - assert_err_re!(file.group("test"), "unable to open group: object.+doesn't exist"); + assert_err_re!( + file.group("test"), + "unable to (?:synchronously )?open group: object.+doesn't exist" + ); }) } @@ -375,7 +591,7 @@ pub mod tests { 
with_tmp_file(|file| { file.create_group("/foo/bar").unwrap(); file.unlink("foo/bar").unwrap(); - assert_err!(file.group("/foo/bar"), "unable to open group"); + assert_err_re!(file.group("/foo/bar"), "unable to (?:synchronously )?open group"); assert!(file.group("foo").unwrap().is_empty()); }) } @@ -383,9 +599,12 @@ pub mod tests { #[test] pub fn test_dataset() { with_tmp_file(|file| { - file.new_dataset::().no_chunk().create("/foo/bar", (10, 20)).unwrap(); - file.new_dataset::().resizable(true).create("baz", (10, 20)).unwrap(); - file.new_dataset::().resizable(true).create_anon((10, 20)).unwrap(); + file.new_dataset::().no_chunk().shape((10, 20)).create("/foo/bar").unwrap(); + file.new_dataset::() + .shape(Extents::resizable((10, 20).into())) + .create("baz") + .unwrap(); + file.new_dataset::().shape((10.., 20..)).create(None).unwrap(); }); } @@ -396,12 +615,64 @@ pub mod tests { file.create_group("b").unwrap(); let group_a = file.group("a").unwrap(); let group_b = file.group("b").unwrap(); - file.new_dataset::().no_chunk().create("a/foo", (10, 20)).unwrap(); - file.new_dataset::().no_chunk().create("a/123", (10, 20)).unwrap(); - file.new_dataset::().no_chunk().create("a/bar", (10, 20)).unwrap(); + file.new_dataset::().no_chunk().shape((10, 20)).create("a/foo").unwrap(); + file.new_dataset::().no_chunk().shape((10, 20)).create("a/123").unwrap(); + file.new_dataset::().no_chunk().shape((10, 20)).create("a/bar").unwrap(); assert_eq!(group_a.member_names().unwrap(), vec!["123", "bar", "foo"]); assert_eq!(group_b.member_names().unwrap().len(), 0); assert_eq!(file.member_names().unwrap(), vec!["a", "b"]); }) } + + #[test] + pub fn test_external_link() { + with_tmp_dir(|dir| { + let file1 = dir.join("foo.h5"); + let file1 = File::create(file1).unwrap(); + let dset1 = file1.new_dataset::().create("foo").unwrap(); + dset1.write_scalar(&13).unwrap(); + + let file2 = dir.join("bar.h5"); + let file2 = File::create(file2).unwrap(); + file2.link_external("foo.h5", "foo", 
"bar").unwrap(); + let dset2 = file2.dataset("bar").unwrap(); + assert_eq!(dset2.read_scalar::().unwrap(), 13); + + file1.unlink("foo").unwrap(); + assert!(file1.dataset("foo").is_err()); + assert!(file2.dataset("bar").is_err()); + + // foo is only weakly closed + assert_eq!(dset1.read_scalar::().unwrap(), 13); + assert_eq!(dset2.read_scalar::().unwrap(), 13); + }) + } + + #[test] + pub fn test_iterators() { + with_tmp_file(|file| { + file.create_group("a").unwrap(); + file.create_group("b").unwrap(); + let group_a = file.group("a").unwrap(); + let _group_b = file.group("b").unwrap(); + file.new_dataset::().shape((10, 20)).create("a/foo").unwrap(); + file.new_dataset::().shape((10, 20)).create("a/123").unwrap(); + file.new_dataset::().shape((10, 20)).create("a/bar").unwrap(); + + let groups = file.groups().unwrap(); + assert_eq!(groups.len(), 2); + for group in groups { + assert!(matches!(group.name().as_ref(), "/a" | "/b")); + } + + let datasets = file.datasets().unwrap(); + assert_eq!(datasets.len(), 0); + + let datasets = group_a.datasets().unwrap(); + assert_eq!(datasets.len(), 3); + for dataset in datasets { + assert!(matches!(dataset.name().as_ref(), "/a/foo" | "/a/123" | "/a/bar")); + } + }) + } } diff --git a/src/hl/location.rs b/src/hl/location.rs index f6e12b27d..e1dc154c9 100644 --- a/src/hl/location.rs +++ b/src/hl/location.rs @@ -1,15 +1,33 @@ use std::fmt::{self, Debug}; +use std::mem::MaybeUninit; use std::ops::Deref; use std::ptr; +#[allow(deprecated)] +use hdf5_sys::h5o::H5Oset_comment; +#[cfg(feature = "1.12.0")] +use hdf5_sys::h5o::{ + H5O_info2_t, H5O_token_t, H5Oget_info3, H5Oget_info_by_name3, H5Oopen_by_token, +}; +#[cfg(not(feature = "1.10.3"))] +use hdf5_sys::h5o::{H5Oget_info1, H5Oget_info_by_name1}; +#[cfg(all(feature = "1.10.3", not(feature = "1.12.0")))] +use hdf5_sys::h5o::{H5Oget_info2, H5Oget_info_by_name2}; +#[cfg(feature = "1.10.3")] +use hdf5_sys::h5o::{H5O_INFO_BASIC, H5O_INFO_NUM_ATTRS, H5O_INFO_TIME}; +#[cfg(not(feature = 
"1.12.0"))] +use hdf5_sys::{h5::haddr_t, h5o::H5O_info1_t, h5o::H5Oopen_by_addr}; use hdf5_sys::{ + h5a::H5Aopen, h5f::H5Fget_name, h5i::{H5Iget_file_id, H5Iget_name}, - h5o::{H5Oget_comment, H5Oset_comment}, + h5o::{H5O_type_t, H5Oget_comment}, }; use crate::internal_prelude::*; +use super::attribute::AttributeBuilderEmpty; + /// Named location (file, group, dataset, named datatype). #[repr(transparent)] #[derive(Clone)] @@ -74,17 +92,210 @@ impl Location { } /// Set or the commment attached to the named object. + #[deprecated(note = "attributes are preferred to comments")] pub fn set_comment(&self, comment: &str) -> Result<()> { // TODO: &mut self? let comment = to_cstring(comment)?; + #[allow(deprecated)] h5call!(H5Oset_comment(self.id(), comment.as_ptr())).and(Ok(())) } /// Clear the commment attached to the named object. + #[deprecated(note = "attributes are preferred to comments")] pub fn clear_comment(&self) -> Result<()> { // TODO: &mut self? + #[allow(deprecated)] h5call!(H5Oset_comment(self.id(), ptr::null_mut())).and(Ok(())) } + + pub fn new_attr(&self) -> AttributeBuilderEmpty { + AttributeBuilder::new(self).empty::() + } + + pub fn new_attr_builder(&self) -> AttributeBuilder { + AttributeBuilder::new(self) + } + + pub fn attr(&self, name: &str) -> Result { + let name = to_cstring(name)?; + Attribute::from_id(h5try!(H5Aopen(self.id(), name.as_ptr(), H5P_DEFAULT))) + } + + pub fn attr_names(&self) -> Result> { + Attribute::attr_names(self) + } + + pub fn loc_info(&self) -> Result { + H5O_get_info(self.id(), true) + } + + pub fn loc_type(&self) -> Result { + Ok(H5O_get_info(self.id(), false)?.loc_type) + } + + pub fn loc_info_by_name(&self, name: &str) -> Result { + let name = to_cstring(name)?; + H5O_get_info_by_name(self.id(), name.as_ptr(), true) + } + + pub fn loc_type_by_name(&self, name: &str) -> Result { + let name = to_cstring(name)?; + Ok(H5O_get_info_by_name(self.id(), name.as_ptr(), false)?.loc_type) + } + + pub fn open_by_token(&self, token: 
LocationToken) -> Result { + H5O_open_by_token(self.id(), token) + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct LocationToken( + #[cfg(not(feature = "1.12.0"))] haddr_t, + #[cfg(feature = "1.12.0")] H5O_token_t, +); + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum LocationType { + Group, + Dataset, + NamedDatatype, + #[cfg(feature = "1.12.0")] + #[cfg_attr(docrs, doc(cfg(feature = "1.12.0")))] + TypeMap, +} + +impl From for LocationType { + fn from(loc_type: H5O_type_t) -> Self { + // we're assuming here that if a C API call returns H5O_TYPE_UNKNOWN (-1), then + // an error has occured anyway and has been pushed on the error stack so we'll + // catch it, and the value of -1 will never reach this conversion function + match loc_type { + H5O_type_t::H5O_TYPE_DATASET => Self::Dataset, + H5O_type_t::H5O_TYPE_NAMED_DATATYPE => Self::NamedDatatype, + #[cfg(feature = "1.12.0")] + H5O_type_t::H5O_TYPE_MAP => Self::TypeMap, + _ => Self::Group, // see the comment above + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +/// Metadata information describing a [`Location`] +/// +/// # Notes +/// +/// In order for all timestamps to be filled out, a few conditions must hold: +/// +/// - Minimum HDF5 library version is 1.10.3. +/// - Library version lower bound in the file access plist must be set to a least 1.10. This +/// can be done via `FileAccessBuilder::libver_v110` or `FileAccessBuilder::libver_latest`. +/// - For datasets, additionally, time tracking must be enabled (which is disabled +/// by default to improve access performance). This can be done via +/// `DatasetBuilder::track_times`. If tracking is enabled, ctime timestamp will likely be +/// filled out even if library version lower bound is not set), but the other three will +/// be zero. 
+pub struct LocationInfo { + /// Number of file where the object is located + pub fileno: u64, + /// Object address in file, or a token identifier + pub token: LocationToken, + /// Basic location type of the object + pub loc_type: LocationType, + /// Number of hard links to the object + pub num_links: usize, + /// Access time + pub atime: i64, + /// Modification time + pub mtime: i64, + /// Change time + pub ctime: i64, + /// Birth time + pub btime: i64, + /// Number of attributes attached to the object + pub num_attrs: usize, +} + +#[cfg(not(feature = "1.12.0"))] +impl From for LocationInfo { + fn from(info: H5O_info1_t) -> Self { + Self { + fileno: info.fileno as _, + token: LocationToken(info.addr), + loc_type: info.type_.into(), + num_links: info.rc as _, + atime: info.atime as _, + mtime: info.mtime as _, + ctime: info.ctime as _, + btime: info.btime as _, + num_attrs: info.num_attrs as _, + } + } +} + +#[cfg(feature = "1.12.0")] +impl From for LocationInfo { + fn from(info: H5O_info2_t) -> Self { + Self { + fileno: info.fileno as _, + token: LocationToken(info.token), + loc_type: info.type_.into(), + num_links: info.rc as _, + atime: info.atime as _, + mtime: info.mtime as _, + ctime: info.ctime as _, + btime: info.btime as _, + num_attrs: info.num_attrs as _, + } + } +} + +#[cfg(feature = "1.10.3")] +fn info_fields(full: bool) -> c_uint { + if full { + H5O_INFO_BASIC | H5O_INFO_NUM_ATTRS | H5O_INFO_TIME + } else { + H5O_INFO_BASIC + } +} + +#[allow(non_snake_case, unused_variables)] +fn H5O_get_info(loc_id: hid_t, full: bool) -> Result { + let mut info_buf = MaybeUninit::uninit(); + let info_ptr = info_buf.as_mut_ptr(); + #[cfg(feature = "1.12.0")] + h5call!(H5Oget_info3(loc_id, info_ptr, info_fields(full)))?; + #[cfg(all(feature = "1.10.3", not(feature = "1.12.0")))] + h5call!(H5Oget_info2(loc_id, info_ptr, info_fields(full)))?; + #[cfg(not(feature = "1.10.3"))] + h5call!(H5Oget_info1(loc_id, info_ptr))?; + let info = unsafe { info_buf.assume_init() }; + 
Ok(info.into()) +} + +#[allow(non_snake_case, unused_variables)] +fn H5O_get_info_by_name(loc_id: hid_t, name: *const c_char, full: bool) -> Result { + let mut info_buf = MaybeUninit::uninit(); + let info_ptr = info_buf.as_mut_ptr(); + #[cfg(feature = "1.12.0")] + h5call!(H5Oget_info_by_name3(loc_id, name, info_ptr, info_fields(full), H5P_DEFAULT))?; + #[cfg(all(feature = "1.10.3", not(feature = "1.12.0")))] + h5call!(H5Oget_info_by_name2(loc_id, name, info_ptr, info_fields(full), H5P_DEFAULT))?; + #[cfg(not(feature = "1.10.3"))] + h5call!(H5Oget_info_by_name1(loc_id, name, info_ptr, H5P_DEFAULT))?; + let info = unsafe { info_buf.assume_init() }; + Ok(info.into()) +} + +#[allow(non_snake_case)] +fn H5O_open_by_token(loc_id: hid_t, token: LocationToken) -> Result { + #[cfg(not(feature = "1.12.0"))] + { + Location::from_id(h5call!(H5Oopen_by_addr(loc_id, token.0))?) + } + #[cfg(feature = "1.12.0")] + { + Location::from_id(h5call!(H5Oopen_by_token(loc_id, token.0))?) + } } #[cfg(test)] @@ -114,6 +325,7 @@ pub mod tests { #[test] pub fn test_comment() { + #[allow(deprecated)] with_tmp_file(|file| { assert!(file.comment().is_none()); assert!(file.set_comment("foo").is_ok()); @@ -122,4 +334,87 @@ pub mod tests { assert!(file.comment().is_none()); }) } + + #[test] + pub fn test_location_info() { + let new_file = |path| { + cfg_if::cfg_if! { + if #[cfg(feature = "1.10.2")] { + File::with_options().with_fapl(|p| p.libver_v110()).create(path) + } else { + File::create(path) + } + } + }; + with_tmp_path(|path| { + let file = new_file(path).unwrap(); + let token = { + let group = file.create_group("group").unwrap(); + assert_eq!(file.loc_type_by_name("group").unwrap(), LocationType::Group); + let info = group.loc_info().unwrap(); + assert_eq!(info.num_links, 1); + assert_eq!(info.loc_type, LocationType::Group); + cfg_if::cfg_if! 
{ + if #[cfg(feature = "1.10.2")] { + assert!(info.btime > 0); + } else { + assert_eq!(info.btime, 0); + } + } + assert_eq!(info.btime == 0, info.mtime == 0); + assert_eq!(info.btime == 0, info.ctime == 0); + assert_eq!(info.btime == 0, info.atime == 0); + assert_eq!(info.num_attrs, 0); + info.token + }; + let group = file.open_by_token(token).unwrap().as_group().unwrap(); + assert_eq!(group.name(), "/group"); + let token = { + let var = group + .new_dataset_builder() + .obj_track_times(true) + .empty::() + .create("var") + .unwrap(); + var.new_attr::().create("attr1").unwrap(); + var.new_attr::().create("attr2").unwrap(); + group.link_hard("var", "hard1").unwrap(); + group.link_hard("var", "hard2").unwrap(); + group.link_hard("var", "hard3").unwrap(); + group.link_hard("var", "hard4").unwrap(); + group.link_hard("var", "hard5").unwrap(); + group.link_soft("var", "soft1").unwrap(); + group.link_soft("var", "soft2").unwrap(); + group.link_soft("var", "soft3").unwrap(); + assert_eq!(file.loc_type_by_name("/group/var").unwrap(), LocationType::Dataset); + let info = var.loc_info().unwrap(); + assert_eq!(info.num_links, 6); // 1 + 5 + assert_eq!(info.loc_type, LocationType::Dataset); + assert!(info.ctime > 0); + cfg_if::cfg_if! 
{ + if #[cfg(feature = "1.10.2")] { + assert!(info.btime > 0); + } else { + assert_eq!(info.btime, 0); + } + } + assert_eq!(info.btime == 0, info.mtime == 0); + assert_eq!(info.btime == 0, info.atime == 0); + assert_eq!(info.num_attrs, 2); + info.token + }; + let var = file.open_by_token(token).unwrap(); + // will open either the first or the last hard-linked object + assert!(var.name().starts_with("/group/hard")); + + let info = file.loc_info_by_name("group").unwrap(); + let group = file.open_by_token(info.token).unwrap(); + assert_eq!(group.name(), "/group"); + let info = file.loc_info_by_name("/group/var").unwrap(); + let var = file.open_by_token(info.token).unwrap(); + assert!(var.name().starts_with("/group/hard")); + + assert!(file.loc_info_by_name("gibberish").is_err()); + }) + } } diff --git a/src/hl/mod.rs b/src/hl/mod.rs deleted file mode 100644 index eebb45cb8..000000000 --- a/src/hl/mod.rs +++ /dev/null @@ -1,21 +0,0 @@ -pub mod container; -pub mod dataset; -pub mod datatype; -pub mod file; -pub mod group; -pub mod location; -pub mod object; -pub mod plist; -pub mod space; - -pub use self::{ - container::{Container, Reader, Writer}, - dataset::{Dataset, DatasetBuilder}, - datatype::{Conversion, Datatype}, - file::{File, FileBuilder, OpenMode}, - group::Group, - location::Location, - object::Object, - plist::PropertyList, - space::Dataspace, -}; diff --git a/src/hl/object.rs b/src/hl/object.rs index 14cc736dc..cabe0e970 100644 --- a/src/hl/object.rs +++ b/src/hl/object.rs @@ -1,7 +1,5 @@ use std::fmt::{self, Debug}; -use hdf5_sys::h5i::H5Iget_ref; - use crate::internal_prelude::*; /// Any HDF5 object that can be referenced through an identifier. @@ -37,25 +35,46 @@ impl Object { /// Returns reference count if the handle is valid and 0 otherwise. 
pub fn refcount(&self) -> u32 { - if self.is_valid() { - h5call!(H5Iget_ref(self.id())).unwrap_or(0) as _ - } else { - 0 - } + self.handle().refcount() } /// Returns `true` if the object has a valid unlocked identifier (`false` for pre-defined /// locked identifiers like property list classes). pub fn is_valid(&self) -> bool { - is_valid_user_id(self.id()) + self.handle().is_valid_user_id() } /// Returns type of the object. pub fn id_type(&self) -> H5I_type_t { - get_id_type(self.id()) + self.handle().id_type() + } + + pub(crate) fn try_borrow(&self) -> Result { + Handle::try_borrow(self.id()) } } +macro_rules! impl_downcast { + ($func:ident, $tp:ty) => { + impl Object { + #[doc = "Downcast the object into $tp if possible."] + pub fn $func(&self) -> Result<$tp> { + self.clone().cast() + } + } + }; +} + +impl_downcast!(as_file, File); +impl_downcast!(as_group, Group); +impl_downcast!(as_dataset, Dataset); +impl_downcast!(as_location, Location); +impl_downcast!(as_attr, Attribute); +impl_downcast!(as_container, Container); +impl_downcast!(as_datatype, Datatype); +impl_downcast!(as_dataspace, Dataspace); +impl_downcast!(as_plist, PropertyList); + #[cfg(test)] pub mod tests { use std::ops::Deref; @@ -63,7 +82,6 @@ pub mod tests { use hdf5_sys::{h5i::H5I_type_t, h5p::H5Pcreate}; use crate::globals::H5P_FILE_ACCESS; - use crate::handle::{is_valid_id, is_valid_user_id}; use crate::internal_prelude::*; pub struct TestObject(Handle); @@ -110,8 +128,7 @@ pub mod tests { let obj = TestObject::from_id(h5call!(H5Pcreate(*H5P_FILE_ACCESS)).unwrap()).unwrap(); assert!(obj.id() > 0); assert!(obj.is_valid()); - assert!(is_valid_id(obj.id())); - assert!(is_valid_user_id(obj.id())); + assert!(obj.handle().is_valid_id()); assert_eq!(obj.id_type(), H5I_type_t::H5I_GENPROP_LST); assert_eq!(obj.refcount(), 1); @@ -120,36 +137,60 @@ pub mod tests { obj.decref(); assert_eq!(obj.refcount(), 1); obj.decref(); - obj.decref(); - assert_eq!(obj.refcount(), 0); - assert!(!obj.is_valid()); - 
assert!(!is_valid_user_id(obj.id())); - assert!(!is_valid_id(obj.id())); + h5lock!({ + obj.decref(); + assert_eq!(obj.refcount(), 0); + assert!(!obj.is_valid()); + assert!(!obj.handle().is_valid_id()); + drop(obj); + }); } #[test] pub fn test_incref_decref_drop() { + use std::mem::ManuallyDrop; let mut obj = TestObject::from_id(h5call!(H5Pcreate(*H5P_FILE_ACCESS)).unwrap()).unwrap(); let obj_id = obj.id(); obj = TestObject::from_id(h5call!(H5Pcreate(*H5P_FILE_ACCESS)).unwrap()).unwrap(); assert_ne!(obj_id, obj.id()); assert!(obj.id() > 0); assert!(obj.is_valid()); - assert!(is_valid_id(obj.id())); - assert!(is_valid_user_id(obj.id())); + assert!(obj.handle().is_valid_id()); assert_eq!(obj.refcount(), 1); - let mut obj2 = TestObject::from_id(obj.id()).unwrap(); + + let obj2 = TestObject::from_id(obj.id()).unwrap(); obj2.incref(); assert_eq!(obj.refcount(), 2); assert_eq!(obj2.refcount(), 2); + drop(obj2); assert!(obj.is_valid()); assert_eq!(obj.refcount(), 1); - obj2 = TestObject::from_id(obj.id()).unwrap(); + + // obj is already owned, we must ensure we do not call drop on this without + // an incref + let mut obj2 = ManuallyDrop::new(TestObject::from_id(obj.id()).unwrap()); + assert_eq!(obj.refcount(), 1); + obj2.incref(); - obj.decref(); - obj.decref(); - assert_eq!(obj.id(), H5I_INVALID_HID); - assert_eq!(obj2.id(), H5I_INVALID_HID); + // We can now take, as we have exactly two handles + let obj2 = unsafe { ManuallyDrop::take(&mut obj2) }; + + h5lock!({ + // We must hold a lock here to prevent another thread creating an object + // with the same identifier as the one we just owned. Failing to do this + // might lead to the wrong object being dropped. + obj.decref(); + obj.decref(); + // We here have to dangling identifiers stored in obj and obj2. 
As this part + // is locked we know some other object is not going to created with these + // identifiers + assert!(!obj.is_valid()); + assert!(!obj2.is_valid()); + // By manually dropping we don't close some other unrelated objects. + // Dropping/closing an invalid object is allowed + drop(obj); + drop(obj2); + }); } } diff --git a/src/hl/plist.rs b/src/hl/plist.rs index 25eb90257..e5c79b564 100644 --- a/src/hl/plist.rs +++ b/src/hl/plist.rs @@ -11,9 +11,12 @@ use hdf5_sys::h5p::{ use crate::internal_prelude::*; +pub mod common; pub mod dataset_access; +pub mod dataset_create; pub mod file_access; pub mod file_create; +pub mod link_create; /// Represents the HDF5 property list. #[repr(transparent)] @@ -33,7 +36,7 @@ impl ObjectClass for PropertyList { } fn short_repr(&self) -> Option { - Some(self.class().ok().map_or_else(|| "unknown class".into(), |c| c.into())) + Some(self.class().ok().map_or_else(|| "unknown class".into(), Into::into)) } } @@ -119,9 +122,9 @@ impl Display for PropertyListClass { } } -impl Into for PropertyListClass { - fn into(self) -> String { - format!("{}", self) +impl From for String { + fn from(v: PropertyListClass) -> Self { + format!("{}", v) } } @@ -170,7 +173,7 @@ impl PropertyList { pub fn properties(&self) -> Vec { extern "C" fn callback(_: hid_t, name: *const c_char, data: *mut c_void) -> herr_t { panic::catch_unwind(|| { - let data = unsafe { &mut *(data as *mut Vec) }; + let data = unsafe { &mut *(data.cast::>()) }; let name = string_from_cstr(name); if !name.is_empty() { data.push(name); @@ -181,7 +184,7 @@ impl PropertyList { } let mut data = Vec::new(); - let data_ptr: *mut c_void = &mut data as *mut _ as *mut _; + let data_ptr: *mut c_void = (&mut data as *mut Vec<_>).cast(); h5lock!(H5Piterate(self.id(), ptr::null_mut(), Some(callback), data_ptr)); data @@ -198,10 +201,10 @@ impl PropertyList { let class_id = h5check(H5Pget_class(self.id()))?; let buf = H5Pget_class_name(class_id); if buf.is_null() { - return 
Err(Error::query().unwrap_or_else(|| "invalid property class".into())); + return Err(Error::query().unwrap_or_else(|_| "invalid property class".into())); } let name = string_from_cstr(buf); - h5_free_memory(buf as _); + h5_free_memory(buf.cast()); PropertyListClass::from_str(&name) }) } @@ -215,7 +218,9 @@ pub fn set_vlen_manager_libc(plist: hid_t) -> Result<()> { panic::catch_unwind(|| unsafe { libc::malloc(size) }).unwrap_or(ptr::null_mut()) } extern "C" fn free(ptr: *mut c_void, _info: *mut libc::c_void) { - let _ = panic::catch_unwind(|| unsafe { libc::free(ptr) }); + let _p = panic::catch_unwind(|| unsafe { + libc::free(ptr); + }); } h5try!(H5Pset_vlen_mem_manager( plist, diff --git a/src/hl/plist/common.rs b/src/hl/plist/common.rs new file mode 100644 index 000000000..ba594ebff --- /dev/null +++ b/src/hl/plist/common.rs @@ -0,0 +1,47 @@ +use hdf5_sys::h5p::{H5P_CRT_ORDER_INDEXED, H5P_CRT_ORDER_TRACKED}; + +use bitflags::bitflags; + +/// Attribute storage phase change thresholds. +/// +/// These thresholds determine the point at which attribute storage changes from +/// compact storage (i.e., storage in the object header) to dense storage (i.e., +/// storage in a heap and indexed with a B-tree). +/// +/// In the general case, attributes are initially kept in compact storage. When +/// the number of attributes exceeds `max_compact`, attribute storage switches to +/// dense storage. If the number of attributes subsequently falls below `min_dense`, +/// the attributes are returned to compact storage. +/// +/// If `max_compact` is set to 0 (zero), dense storage always used. +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct AttrPhaseChange { + /// Maximum number of attributes to be stored in compact storage (default: 8). + pub max_compact: u32, + /// Minimum number of attributes to be stored in dense storage (default: 6). 
+ pub min_dense: u32, +} + +impl Default for AttrPhaseChange { + fn default() -> Self { + Self { max_compact: 8, min_dense: 6 } + } +} + +bitflags! { + /// Flags for tracking and indexing attribute creation order of an object. + /// + /// Default behavior is that attribute creation order is neither tracked nor indexed. + /// + /// Note that if a creation order index is to be built, it must be specified in + /// the object creation property list. HDF5 currently provides no mechanism to turn + /// on attribute creation order tracking at object creation time and to build the + /// index later. + #[derive(Default)] + pub struct AttrCreationOrder: u32 { + /// Attribute creation order is tracked but not necessarily indexed. + const TRACKED = H5P_CRT_ORDER_TRACKED as _; + /// Attribute creation order is indexed (requires to be tracked). + const INDEXED = H5P_CRT_ORDER_INDEXED as _; + } +} diff --git a/src/hl/plist/dataset_access.rs b/src/hl/plist/dataset_access.rs index 142b25617..4f448c1dc 100644 --- a/src/hl/plist/dataset_access.rs +++ b/src/hl/plist/dataset_access.rs @@ -9,11 +9,11 @@ use std::fmt::{self, Debug}; use std::ops::Deref; use hdf5_sys::h5p::{H5Pcreate, H5Pget_chunk_cache, H5Pset_chunk_cache}; -#[cfg(all(hdf5_1_10_0, h5_have_parallel))] +#[cfg(all(feature = "1.10.0", feature = "have-parallel"))] use hdf5_sys::h5p::{H5Pget_all_coll_metadata_ops, H5Pset_all_coll_metadata_ops}; -#[cfg(hdf5_1_8_17)] +#[cfg(feature = "1.8.17")] use hdf5_sys::h5p::{H5Pget_efile_prefix, H5Pset_efile_prefix}; -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] use hdf5_sys::{ h5d::H5D_vds_view_t, h5p::{ @@ -53,17 +53,16 @@ impl ObjectClass for DatasetAccess { impl Debug for DatasetAccess { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let _e = silence_errors(); let mut formatter = f.debug_struct("DatasetAccess"); formatter.field("chunk_cache", &self.chunk_cache()); - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] formatter.field("efile_prefix", &self.efile_prefix()); - 
#[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] { formatter.field("virtual_view", &self.virtual_view()); formatter.field("virtual_printf_gap", &self.virtual_printf_gap()); } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] formatter.field("all_coll_metadata_ops", &self.all_coll_metadata_ops()); formatter.finish() } @@ -87,25 +86,25 @@ impl Eq for DatasetAccess {} impl Clone for DatasetAccess { fn clone(&self) -> Self { - unsafe { self.deref().clone().cast() } + unsafe { self.deref().clone().cast_unchecked() } } } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum VirtualView { FirstMissing, LastAvailable, } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] impl Default for VirtualView { fn default() -> Self { Self::LastAvailable } } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] impl From for VirtualView { fn from(view: H5D_vds_view_t) -> Self { match view { @@ -115,12 +114,12 @@ impl From for VirtualView { } } -#[cfg(hdf5_1_10_0)] -impl Into for VirtualView { - fn into(self) -> H5D_vds_view_t { - match self { - Self::FirstMissing => H5D_vds_view_t::H5D_VDS_FIRST_MISSING, - _ => H5D_vds_view_t::H5D_VDS_LAST_AVAILABLE, +#[cfg(feature = "1.10.0")] +impl From for H5D_vds_view_t { + fn from(v: VirtualView) -> Self { + match v { + VirtualView::FirstMissing => Self::H5D_VDS_FIRST_MISSING, + VirtualView::LastAvailable => Self::H5D_VDS_LAST_AVAILABLE, } } } @@ -129,13 +128,13 @@ impl Into for VirtualView { #[derive(Clone, Debug, Default)] pub struct DatasetAccessBuilder { chunk_cache: Option, - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] efile_prefix: Option, - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] virtual_view: Option, - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] virtual_printf_gap: Option, - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] all_coll_metadata_ops: Option, } @@ -150,17 
+149,17 @@ impl DatasetAccessBuilder { let mut builder = Self::default(); let v = plist.get_chunk_cache()?; builder.chunk_cache(v.nslots, v.nbytes, v.w0); - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] { let v = plist.get_efile_prefix()?; builder.efile_prefix(&v); } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] { builder.virtual_view(plist.get_virtual_view()?); builder.virtual_printf_gap(plist.get_virtual_printf_gap()?); } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] builder.all_coll_metadata_ops(plist.get_all_coll_metadata_ops()?); Ok(builder) } @@ -170,25 +169,25 @@ impl DatasetAccessBuilder { self } - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] pub fn efile_prefix(&mut self, prefix: &str) -> &mut Self { self.efile_prefix = Some(prefix.into()); self } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn virtual_view(&mut self, view: VirtualView) -> &mut Self { self.virtual_view = Some(view); self } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn virtual_printf_gap(&mut self, gap_size: usize) -> &mut Self { self.virtual_printf_gap = Some(gap_size); self } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn all_coll_metadata_ops(&mut self, is_collective: bool) -> &mut Self { self.all_coll_metadata_ops = Some(is_collective); self @@ -198,14 +197,14 @@ impl DatasetAccessBuilder { if let Some(v) = self.chunk_cache { h5try!(H5Pset_chunk_cache(id, v.nslots as _, v.nbytes as _, v.w0 as _)); } - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] { if let Some(ref v) = self.efile_prefix { let v = to_cstring(v.as_ref())?; h5try!(H5Pset_efile_prefix(id, v.as_ptr())); } } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] { if let Some(v) = self.virtual_view { h5try!(H5Pset_virtual_view(id, v.into())); @@ -214,7 +213,7 @@ impl DatasetAccessBuilder { h5try!(H5Pset_virtual_printf_gap(id, v as _)); } } - #[cfg(all(hdf5_1_10_0, 
h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] { if let Some(v) = self.all_coll_metadata_ops { h5try!(H5Pset_all_coll_metadata_ops(id, v as _)); @@ -223,11 +222,14 @@ impl DatasetAccessBuilder { Ok(()) } + pub fn apply(&self, plist: &mut DatasetAccess) -> Result<()> { + h5lock!(self.populate_plist(plist.id())) + } + pub fn finish(&self) -> Result { h5lock!({ - let plist = DatasetAccess::try_new()?; - self.populate_plist(plist.id())?; - Ok(plist) + let mut plist = DatasetAccess::try_new()?; + self.apply(&mut plist).map(|_| plist) }) } } @@ -239,7 +241,7 @@ impl DatasetAccess { } pub fn copy(&self) -> Self { - unsafe { self.deref().copy().cast() } + unsafe { self.deref().copy().cast_unchecked() } } pub fn build() -> DatasetAccessBuilder { @@ -261,46 +263,46 @@ impl DatasetAccess { self.get_chunk_cache().unwrap_or_else(|_| ChunkCache::default()) } - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] #[doc(hidden)] pub fn get_efile_prefix(&self) -> Result { h5lock!(get_h5_str(|m, s| H5Pget_efile_prefix(self.id(), m, s))) } - #[cfg(hdf5_1_8_17)] + #[cfg(feature = "1.8.17")] pub fn efile_prefix(&self) -> String { self.get_efile_prefix().ok().unwrap_or_else(|| "".into()) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] #[doc(hidden)] pub fn get_virtual_view(&self) -> Result { h5get!(H5Pget_virtual_view(self.id()): H5D_vds_view_t).map(Into::into) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn virtual_view(&self) -> VirtualView { - self.get_virtual_view().ok().unwrap_or_else(VirtualView::default) + self.get_virtual_view().ok().unwrap_or_default() } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] #[doc(hidden)] pub fn get_virtual_printf_gap(&self) -> Result { h5get!(H5Pget_virtual_printf_gap(self.id()): hsize_t).map(|x| x as _) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn virtual_printf_gap(&self) -> usize { self.get_virtual_printf_gap().unwrap_or(0) } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + 
#[cfg(all(feature = "1.10.0", feature = "have-parallel"))] #[doc(hidden)] pub fn get_all_coll_metadata_ops(&self) -> Result { h5get!(H5Pget_all_coll_metadata_ops(self.id()): hbool_t).map(|x| x > 0) } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn all_coll_metadata_ops(&self) -> bool { self.get_all_coll_metadata_ops().unwrap_or(false) } diff --git a/src/hl/plist/dataset_create.rs b/src/hl/plist/dataset_create.rs new file mode 100644 index 000000000..f3c3f8852 --- /dev/null +++ b/src/hl/plist/dataset_create.rs @@ -0,0 +1,899 @@ +//! Dataset creation properties. + +use std::fmt::{self, Debug}; +use std::ops::Deref; +use std::ptr; + +#[cfg(feature = "1.10.0")] +use bitflags::bitflags; + +use hdf5_sys::h5d::{H5D_alloc_time_t, H5D_fill_time_t, H5D_fill_value_t, H5D_layout_t}; +use hdf5_sys::h5f::H5F_UNLIMITED; +use hdf5_sys::h5p::{ + H5Pall_filters_avail, H5Pcreate, H5Pfill_value_defined, H5Pget_alloc_time, + H5Pget_attr_creation_order, H5Pget_attr_phase_change, H5Pget_chunk, H5Pget_external, + H5Pget_external_count, H5Pget_fill_time, H5Pget_fill_value, H5Pget_layout, + H5Pget_obj_track_times, H5Pset_alloc_time, H5Pset_attr_creation_order, + H5Pset_attr_phase_change, H5Pset_chunk, H5Pset_external, H5Pset_fill_time, H5Pset_fill_value, + H5Pset_layout, H5Pset_obj_track_times, +}; +use hdf5_sys::h5t::H5Tget_class; +use hdf5_sys::h5z::H5Z_filter_t; +#[cfg(feature = "1.10.0")] +use hdf5_sys::{ + h5d::H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS, + h5p::{ + H5Pget_chunk_opts, H5Pget_virtual_count, H5Pget_virtual_dsetname, H5Pget_virtual_filename, + H5Pget_virtual_srcspace, H5Pget_virtual_vspace, H5Pset_chunk_opts, H5Pset_virtual, + }, +}; +use hdf5_types::{OwnedDynValue, TypeDescriptor}; + +use crate::dim::Dimension; +use crate::globals::H5P_DATASET_CREATE; +use crate::hl::datatype::Datatype; +use crate::hl::filters::{validate_filters, Filter, SZip, ScaleOffset}; +#[cfg(feature = "blosc")] +use 
crate::hl::filters::{Blosc, BloscShuffle}; +pub use crate::hl::plist::common::{AttrCreationOrder, AttrPhaseChange}; +use crate::internal_prelude::*; + +/// Dataset creation properties. +#[repr(transparent)] +pub struct DatasetCreate(Handle); + +impl ObjectClass for DatasetCreate { + const NAME: &'static str = "dataset creation property list"; + const VALID_TYPES: &'static [H5I_type_t] = &[H5I_GENPROP_LST]; + + fn from_handle(handle: Handle) -> Self { + Self(handle) + } + + fn handle(&self) -> &Handle { + &self.0 + } + + fn validate(&self) -> Result<()> { + let class = self.class()?; + if class != PropertyListClass::DatasetCreate { + fail!("expected dataset creation property list, got {:?}", class); + } + Ok(()) + } +} + +impl Debug for DatasetCreate { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = f.debug_struct("DatasetCreate"); + formatter.field("filters", &self.filters()); + formatter.field("alloc_time", &self.alloc_time()); + formatter.field("fill_time", &self.fill_time()); + formatter.field("fill_value", &self.fill_value_defined()); + formatter.field("chunk", &self.chunk()); + formatter.field("layout", &self.layout()); + #[cfg(feature = "1.10.0")] + formatter.field("chunk_opts", &self.chunk_opts()); + formatter.field("external", &self.external()); + #[cfg(feature = "1.10.0")] + formatter.field("virtual_map", &self.virtual_map()); + formatter.field("obj_track_times", &self.obj_track_times()); + formatter.field("attr_phase_change", &self.attr_phase_change()); + formatter.field("attr_creation_order", &self.attr_creation_order()); + formatter.finish() + } +} + +impl Deref for DatasetCreate { + type Target = PropertyList; + + fn deref(&self) -> &PropertyList { + unsafe { self.transmute() } + } +} + +impl PartialEq for DatasetCreate { + fn eq(&self, other: &Self) -> bool { + ::eq(self, other) + } +} + +impl Eq for DatasetCreate {} + +impl Clone for DatasetCreate { + fn clone(&self) -> Self { + unsafe { 
self.deref().clone().cast_unchecked() } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum Layout { + Compact, + Contiguous, + Chunked, + #[cfg(feature = "1.10.0")] + Virtual, +} + +impl Default for Layout { + fn default() -> Self { + Self::Contiguous + } +} + +impl From for Layout { + fn from(layout: H5D_layout_t) -> Self { + match layout { + H5D_layout_t::H5D_COMPACT => Self::Compact, + H5D_layout_t::H5D_CHUNKED => Self::Chunked, + #[cfg(feature = "1.10.0")] + H5D_layout_t::H5D_VIRTUAL => Self::Virtual, + _ => Self::Contiguous, + } + } +} + +impl From for H5D_layout_t { + fn from(layout: Layout) -> Self { + match layout { + Layout::Compact => Self::H5D_COMPACT, + Layout::Chunked => Self::H5D_CHUNKED, + #[cfg(feature = "1.10.0")] + Layout::Virtual => Self::H5D_VIRTUAL, + Layout::Contiguous => Self::H5D_CONTIGUOUS, + } + } +} + +#[cfg(feature = "1.10.0")] +bitflags! { + pub struct ChunkOpts: u32 { + const DONT_FILTER_PARTIAL_CHUNKS = H5D_CHUNK_DONT_FILTER_PARTIAL_CHUNKS; + } +} + +#[cfg(feature = "1.10.0")] +impl Default for ChunkOpts { + fn default() -> Self { + Self::DONT_FILTER_PARTIAL_CHUNKS + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum AllocTime { + Early, + Incr, + Late, +} + +impl From for AllocTime { + fn from(alloc_time: H5D_alloc_time_t) -> Self { + match alloc_time { + H5D_alloc_time_t::H5D_ALLOC_TIME_EARLY => Self::Early, + H5D_alloc_time_t::H5D_ALLOC_TIME_INCR => Self::Incr, + _ => Self::Late, + } + } +} + +impl From for H5D_alloc_time_t { + fn from(alloc_time: AllocTime) -> Self { + match alloc_time { + AllocTime::Early => Self::H5D_ALLOC_TIME_EARLY, + AllocTime::Incr => Self::H5D_ALLOC_TIME_INCR, + AllocTime::Late => Self::H5D_ALLOC_TIME_LATE, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum FillTime { + IfSet, + Alloc, + Never, +} + +impl Default for FillTime { + fn default() -> Self { + Self::IfSet + } +} + +impl From for FillTime { + fn from(fill_time: H5D_fill_time_t) -> Self { + match 
fill_time { + H5D_fill_time_t::H5D_FILL_TIME_IFSET => Self::IfSet, + H5D_fill_time_t::H5D_FILL_TIME_ALLOC => Self::Alloc, + _ => Self::Never, + } + } +} + +impl From for H5D_fill_time_t { + fn from(fill_time: FillTime) -> Self { + match fill_time { + FillTime::IfSet => Self::H5D_FILL_TIME_IFSET, + FillTime::Alloc => Self::H5D_FILL_TIME_ALLOC, + FillTime::Never => Self::H5D_FILL_TIME_NEVER, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum FillValue { + Undefined, + Default, + UserDefined, +} + +impl Default for FillValue { + fn default() -> Self { + Self::Default + } +} + +impl From for FillValue { + fn from(fill_value: H5D_fill_value_t) -> Self { + match fill_value { + H5D_fill_value_t::H5D_FILL_VALUE_DEFAULT => Self::Default, + H5D_fill_value_t::H5D_FILL_VALUE_USER_DEFINED => Self::UserDefined, + _ => Self::Undefined, + } + } +} + +impl From for H5D_fill_value_t { + fn from(fill_value: FillValue) -> Self { + match fill_value { + FillValue::Default => Self::H5D_FILL_VALUE_DEFAULT, + FillValue::UserDefined => Self::H5D_FILL_VALUE_USER_DEFINED, + FillValue::Undefined => Self::H5D_FILL_VALUE_UNDEFINED, + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct ExternalFile { + pub name: String, + pub offset: usize, + pub size: usize, +} + +#[cfg(feature = "1.10.0")] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct VirtualMapping { + pub src_filename: String, + pub src_dataset: String, + pub src_extents: Extents, + pub src_selection: Selection, + pub vds_extents: Extents, + pub vds_selection: Selection, +} + +#[cfg(feature = "1.10.0")] +impl VirtualMapping { + pub fn new( + src_filename: F, src_dataset: D, src_extents: E1, src_selection: S1, vds_extents: E2, + vds_selection: S2, + ) -> Self + where + F: AsRef, + D: AsRef, + E1: Into, + S1: Into, + E2: Into, + S2: Into, + { + Self { + src_filename: src_filename.as_ref().into(), + src_dataset: src_dataset.as_ref().into(), + src_extents: src_extents.into(), + src_selection: 
src_selection.into(), + vds_extents: vds_extents.into(), + vds_selection: vds_selection.into(), + } + } +} + +/// Builder used to create dataset creation property list. +#[derive(Clone, Debug, Default)] +pub struct DatasetCreateBuilder { + filters: Vec, + #[allow(clippy::option_option)] + alloc_time: Option>, + fill_time: Option, + fill_value: Option, + chunk: Option>, + layout: Option, + #[cfg(feature = "1.10.0")] + chunk_opts: Option, + external: Vec, + #[cfg(feature = "1.10.0")] + virtual_map: Vec, + obj_track_times: Option, + attr_phase_change: Option, + attr_creation_order: Option, +} + +impl DatasetCreateBuilder { + /// Creates a new dataset creation property list builder. + pub fn new() -> Self { + Self::default() + } + + /// Creates a new builder from an existing property list. + /// + /// **Note**: the fill value is not copied over (due to its type not being + /// exposed in the property list API). + pub fn from_plist(plist: &DatasetCreate) -> Result { + let mut builder = Self::default(); + builder.set_filters(&plist.get_filters()?); + builder.alloc_time(Some(plist.get_alloc_time()?)); + builder.fill_time(plist.get_fill_time()?); + if let Some(v) = plist.get_chunk()? { + builder.chunk(&v); + } + let layout = plist.get_layout()?; + builder.layout(layout); + #[cfg(feature = "1.10.0")] + { + if let Some(v) = plist.get_chunk_opts()? { + builder.chunk_opts(v); + } + if layout == Layout::Virtual { + for mapping in &plist.get_virtual_map()? { + builder.virtual_map( + &mapping.src_filename, + &mapping.src_dataset, + &mapping.src_extents, + &mapping.src_selection, + &mapping.vds_extents, + &mapping.vds_selection, + ); + } + } + } + for external in &plist.get_external()? 
{ + builder.external(&external.name, external.offset, external.size); + } + builder.obj_track_times(plist.get_obj_track_times()?); + let apc = plist.get_attr_phase_change()?; + builder.attr_phase_change(apc.max_compact, apc.min_dense); + builder.attr_creation_order(plist.get_attr_creation_order()?); + Ok(builder) + } + + pub fn set_filters(&mut self, filters: &[Filter]) -> &mut Self { + self.filters = filters.to_owned(); + self + } + + pub fn deflate(&mut self, level: u8) -> &mut Self { + self.filters.push(Filter::deflate(level)); + self + } + + pub fn shuffle(&mut self) -> &mut Self { + self.filters.push(Filter::shuffle()); + self + } + + pub fn fletcher32(&mut self) -> &mut Self { + self.filters.push(Filter::fletcher32()); + self + } + + pub fn szip(&mut self, coding: SZip, px_per_block: u8) -> &mut Self { + self.filters.push(Filter::szip(coding, px_per_block)); + self + } + + pub fn nbit(&mut self) -> &mut Self { + self.filters.push(Filter::nbit()); + self + } + + pub fn scale_offset(&mut self, mode: ScaleOffset) -> &mut Self { + self.filters.push(Filter::scale_offset(mode)); + self + } + + #[cfg(feature = "lzf")] + pub fn lzf(&mut self) -> &mut Self { + self.filters.push(Filter::lzf()); + self + } + + #[cfg(feature = "blosc")] + /// Enable the blosc filter on this dataset. + /// + /// For efficient compression and decompression on multiple cores a chunk-size + /// of minimum 1MB per core should be selected. + /// For e.g. 16 cores a minimum chunksize of 16MB should allow efficient + /// compression and decompression, although larger chunks might be more efficient. 
+ pub fn blosc(&mut self, complib: Blosc, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc(complib, clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_blosclz(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_blosclz(clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_lz4(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_lz4(clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_lz4hc(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_lz4hc(clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_snappy(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_snappy(clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_zlib(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_zlib(clevel, shuffle)); + self + } + + #[cfg(feature = "blosc")] + pub fn blosc_zstd(&mut self, clevel: u8, shuffle: T) -> &mut Self + where + T: Into, + { + self.filters.push(Filter::blosc_zstd(clevel, shuffle)); + self + } + + pub fn add_filter(&mut self, id: H5Z_filter_t, cdata: &[c_uint]) -> &mut Self { + self.filters.push(Filter::user(id, cdata)); + self + } + + pub fn clear_filters(&mut self) -> &mut Self { + self.filters.clear(); + self + } + + pub fn alloc_time(&mut self, alloc_time: Option) -> &mut Self { + self.alloc_time = Some(alloc_time); + self + } + + pub fn fill_time(&mut self, fill_time: FillTime) -> &mut Self { + self.fill_time = Some(fill_time); + self + } + + pub(crate) fn has_fill_time(&self) -> bool { + self.fill_time.is_some() + } + + pub fn fill_value>(&mut self, fill_value: T) -> &mut Self { + self.fill_value = Some(fill_value.into()); + self + } + + pub fn 
no_fill_value(&mut self) -> &mut Self { + self.fill_value = None; + self + } + + /// Set chunking for the dataset + /// + /// The chunk should match the usage pattern of the dataset. + /// + /// If compression is enabled, it is a good idea to have chunks of sufficient + /// size to allow efficient compression. Chunk sizes of less than 4MB will in + /// most cases be inefficient, and will yield limited space- and time-savings. + pub fn chunk(&mut self, chunk: D) -> &mut Self { + self.chunk = Some(chunk.dims()); + self + } + + pub fn no_chunk(&mut self) -> &mut Self { + self.chunk = None; + self + } + + pub fn layout(&mut self, layout: Layout) -> &mut Self { + self.layout = Some(layout); + self + } + + #[cfg(feature = "1.10.0")] + pub fn chunk_opts(&mut self, opts: ChunkOpts) -> &mut Self { + self.chunk_opts = Some(opts); + self + } + + pub fn external(&mut self, name: &str, offset: usize, size: usize) -> &mut Self { + self.external.push(ExternalFile { name: name.to_owned(), offset, size }); + self + } + + #[cfg(feature = "1.10.0")] + pub fn virtual_map( + &mut self, src_filename: F, src_dataset: D, src_extents: E1, src_selection: S1, + vds_extents: E2, vds_selection: S2, + ) -> &mut Self + where + F: AsRef, + D: AsRef, + E1: Into, + S1: Into, + E2: Into, + S2: Into, + { + self.virtual_map.push(VirtualMapping { + src_filename: src_filename.as_ref().into(), + src_dataset: src_dataset.as_ref().into(), + src_extents: src_extents.into(), + src_selection: src_selection.into(), + vds_extents: vds_extents.into(), + vds_selection: vds_selection.into(), + }); + self + } + + pub fn obj_track_times(&mut self, track_times: bool) -> &mut Self { + self.obj_track_times = Some(track_times); + self + } + + pub fn attr_phase_change(&mut self, max_compact: u32, min_dense: u32) -> &mut Self { + self.attr_phase_change = Some(AttrPhaseChange { max_compact, min_dense }); + self + } + + pub fn attr_creation_order(&mut self, attr_creation_order: AttrCreationOrder) -> &mut Self { + 
self.attr_creation_order = Some(attr_creation_order); + self + } + + fn populate_plist(&self, id: hid_t) -> Result<()> { + for filter in &self.filters { + filter.apply_to_plist(id)?; + } + if let Some(v) = self.alloc_time { + let v = v.map_or(H5D_alloc_time_t::H5D_ALLOC_TIME_DEFAULT, Into::into); + h5try!(H5Pset_alloc_time(id, v)); + } + if let Some(v) = self.fill_time { + h5try!(H5Pset_fill_time(id, v.into())); + } + if let Some(ref v) = self.fill_value { + let dtype = Datatype::from_descriptor(v.type_descriptor())?; + h5try!(H5Pset_fill_value(id, dtype.id(), v.get_buf().as_ptr().cast())); + } + if let Some(v) = self.layout { + h5try!(H5Pset_layout(id, v.into())); + } + if let Some(ref v) = self.chunk { + let v = v.iter().map(|&x| x as _).collect::>(); + h5try!(H5Pset_chunk(id, v.len() as _, v.as_ptr())); + } + #[cfg(feature = "1.10.0")] + { + if let Some(v) = self.chunk_opts { + h5try!(H5Pset_chunk_opts(id, v.bits() as _)); + } + for v in &self.virtual_map { + let src_filename = to_cstring(v.src_filename.as_str())?; + let src_dataset = to_cstring(v.src_dataset.as_str())?; + let src_space = Dataspace::try_new(&v.src_extents)?.select(&v.src_selection)?; + let vds_space = Dataspace::try_new(&v.vds_extents)?.select(&v.vds_selection)?; + h5try!(H5Pset_virtual( + id, + vds_space.id(), + src_filename.as_ptr(), + src_dataset.as_ptr(), + src_space.id() + )); + } + } + for external in &self.external { + let name = to_cstring(external.name.as_str())?; + let size = if external.size == 0 { H5F_UNLIMITED as _ } else { external.size as _ }; + h5try!(H5Pset_external(id, name.as_ptr(), external.offset as _, size)); + } + if let Some(v) = self.obj_track_times { + h5try!(H5Pset_obj_track_times(id, hbool_t::from(v))); + } + if let Some(v) = self.attr_phase_change { + h5try!(H5Pset_attr_phase_change(id, v.max_compact as _, v.min_dense as _)); + } + if let Some(v) = self.attr_creation_order { + h5try!(H5Pset_attr_creation_order(id, v.bits() as _)); + } + Ok(()) + } + + pub(crate) fn 
validate_filters(&self, datatype_id: hid_t) -> Result<()> { + validate_filters(&self.filters, h5lock!(H5Tget_class(datatype_id))) + } + + pub(crate) fn has_filters(&self) -> bool { + !self.filters.is_empty() + } + + pub fn apply(&self, plist: &mut DatasetCreate) -> Result<()> { + h5lock!(self.populate_plist(plist.id())) + } + + pub fn finish(&self) -> Result { + h5lock!({ + let mut plist = DatasetCreate::try_new()?; + self.apply(&mut plist).map(|_| plist) + }) + } +} + +/// Dataset creation property list. +impl DatasetCreate { + pub fn try_new() -> Result { + Self::from_id(h5try!(H5Pcreate(*H5P_DATASET_CREATE))) + } + + pub fn copy(&self) -> Self { + unsafe { self.deref().copy().cast_unchecked() } + } + + pub fn build() -> DatasetCreateBuilder { + DatasetCreateBuilder::new() + } + + pub fn all_filters_avail(&self) -> bool { + h5lock!(H5Pall_filters_avail(self.id())) > 0 + } + + #[doc(hidden)] + pub fn get_filters(&self) -> Result> { + Filter::extract_pipeline(self.id()) + } + + pub fn filters(&self) -> Vec { + self.get_filters().unwrap_or_default() + } + + pub fn has_filters(&self) -> bool { + !self.filters().is_empty() + } + + #[doc(hidden)] + pub fn get_alloc_time(&self) -> Result { + h5get!(H5Pget_alloc_time(self.id()): H5D_alloc_time_t).map(Into::into) + } + + pub fn alloc_time(&self) -> AllocTime { + self.get_alloc_time().unwrap_or(AllocTime::Late) + } + + #[doc(hidden)] + pub fn get_fill_time(&self) -> Result { + h5get!(H5Pget_fill_time(self.id()): H5D_fill_time_t).map(Into::into) + } + + pub fn fill_time(&self) -> FillTime { + self.get_fill_time().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_fill_value_defined(&self) -> Result { + h5get!(H5Pfill_value_defined(self.id()): H5D_fill_value_t).map(Into::into) + } + + pub fn fill_value_defined(&self) -> FillValue { + self.get_fill_value_defined().unwrap_or(FillValue::Undefined) + } + + #[doc(hidden)] + pub fn get_fill_value(&self, tp: &TypeDescriptor) -> Result> { + match self.get_fill_value_defined()? 
{ + FillValue::Default | FillValue::UserDefined => { + let dtype = Datatype::from_descriptor(tp)?; + let mut buf: Vec = Vec::with_capacity(tp.size()); + h5try!(H5Pget_fill_value(self.id(), dtype.id(), buf.as_mut_ptr().cast())); + unsafe { + buf.set_len(tp.size()); + } + Ok(Some(unsafe { OwnedDynValue::from_raw(tp.clone(), buf.into_boxed_slice()) })) + } + FillValue::Undefined => Ok(None), + } + } + + pub fn fill_value(&self, tp: &TypeDescriptor) -> Option { + self.get_fill_value(tp).unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_fill_value_as(&self) -> Result> { + let dtype = Datatype::from_type::()?; + self.get_fill_value(&dtype.to_descriptor()?)? + .map(|value| { + value + .cast::() + .map_err(|_| "The fill value and requested types are not equal".into()) + }) + .transpose() + } + + pub fn fill_value_as(&self) -> Option { + self.get_fill_value_as::().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_chunk(&self) -> Result>> { + if self.get_layout()? == Layout::Chunked { + let ndims = h5try!(H5Pget_chunk(self.id(), 0, ptr::null_mut())); + let mut buf: Vec = vec![0; ndims as usize]; + h5try!(H5Pget_chunk(self.id(), ndims, buf.as_mut_ptr())); + Ok(Some(buf.into_iter().map(|x| x as _).collect())) + } else { + Ok(None) + } + } + + pub fn chunk(&self) -> Option> { + self.get_chunk().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_layout(&self) -> Result { + let layout = h5lock!(H5Pget_layout(self.id())); + h5check(layout as c_int)?; + Ok(layout.into()) + } + + pub fn layout(&self) -> Layout { + self.get_layout().unwrap_or_default() + } + + #[cfg(feature = "1.10.0")] + #[doc(hidden)] + pub fn get_chunk_opts(&self) -> Result> { + if self.get_layout()? 
== Layout::Chunked { + let opts = h5get!(H5Pget_chunk_opts(self.id()): c_uint)?; + Ok(Some(ChunkOpts::from_bits_truncate(opts as _))) + } else { + Ok(None) + } + } + + #[cfg(feature = "1.10.0")] + pub fn chunk_opts(&self) -> Option { + self.get_chunk_opts().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_external(&self) -> Result> { + const NAME_LEN: usize = 1024; + h5lock!({ + let mut external = Vec::new(); + let count = h5try!(H5Pget_external_count(self.id())); + let mut name: Vec = vec![0; NAME_LEN + 1]; + for idx in 0..count { + let mut offset: libc::off_t = 0; + let mut size: hsize_t = 0; + h5try!(H5Pget_external( + self.id(), + idx as _, + NAME_LEN as _, + name.as_mut_ptr(), + &mut offset as *mut _, + &mut size as *mut _, + )); + #[allow(clippy::absurd_extreme_comparisons)] + external.push(ExternalFile { + name: string_from_cstr(name.as_ptr()), + offset: offset as _, + size: if size >= H5F_UNLIMITED { 0 } else { size as _ }, + }); + } + Ok(external) + }) + } + + pub fn external(&self) -> Vec { + self.get_external().unwrap_or_default() + } + + #[cfg(feature = "1.10.0")] + #[doc(hidden)] + pub fn get_virtual_map(&self) -> Result> { + sync(|| unsafe { + let id = self.id(); + let n_virtual = h5get!(H5Pget_virtual_count(id): size_t)? 
as _; + let mut virtual_map = Vec::with_capacity(n_virtual); + + for i in 0..n_virtual { + let src_filename = get_h5_str(|s, n| H5Pget_virtual_filename(id, i, s, n))?; + let src_dataset = get_h5_str(|s, n| H5Pget_virtual_dsetname(id, i, s, n))?; + + let src_space_id = h5check(H5Pget_virtual_srcspace(id, i))?; + let src_space = Dataspace::from_id(src_space_id)?; + let src_extents = src_space.extents()?; + let src_selection = src_space.get_selection()?; + + let vds_space_id = h5check(H5Pget_virtual_vspace(id, i))?; + let vds_space = Dataspace::from_id(vds_space_id)?; + let vds_extents = vds_space.extents()?; + let vds_selection = vds_space.get_selection()?; + + virtual_map.push(VirtualMapping { + src_filename, + src_dataset, + src_extents, + src_selection, + vds_extents, + vds_selection, + }); + } + + Ok(virtual_map) + }) + } + + #[cfg(feature = "1.10.0")] + pub fn virtual_map(&self) -> Vec { + self.get_virtual_map().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_obj_track_times(&self) -> Result { + h5get!(H5Pget_obj_track_times(self.id()): hbool_t).map(|x| x > 0) + } + + pub fn obj_track_times(&self) -> bool { + self.get_obj_track_times().unwrap_or(true) + } + + #[doc(hidden)] + pub fn get_attr_phase_change(&self) -> Result { + h5get!(H5Pget_attr_phase_change(self.id()): c_uint, c_uint) + .map(|(mc, md)| AttrPhaseChange { max_compact: mc as _, min_dense: md as _ }) + } + + pub fn attr_phase_change(&self) -> AttrPhaseChange { + self.get_attr_phase_change().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_attr_creation_order(&self) -> Result { + h5get!(H5Pget_attr_creation_order(self.id()): c_uint) + .map(AttrCreationOrder::from_bits_truncate) + } + + pub fn attr_creation_order(&self) -> AttrCreationOrder { + self.get_attr_creation_order().unwrap_or_default() + } +} diff --git a/src/hl/plist/file_access.rs b/src/hl/plist/file_access.rs index d6582e020..b8d6db75b 100644 --- a/src/hl/plist/file_access.rs +++ b/src/hl/plist/file_access.rs @@ -42,38 +42,38 
@@ use hdf5_sys::h5p::{ H5Pset_gc_references, H5Pset_mdc_config, H5Pset_meta_block_size, H5Pset_sieve_buf_size, H5Pset_small_data_block_size, }; -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] use hdf5_sys::h5p::{H5Pget_fapl_direct, H5Pset_fapl_direct}; #[cfg(feature = "mpio")] use hdf5_sys::h5p::{H5Pget_fapl_mpio, H5Pset_fapl_mpio}; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] use hdf5_sys::h5ac::{H5AC_cache_image_config_t, H5AC__CACHE_IMAGE__ENTRY_AGEOUT__NONE}; -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] use hdf5_sys::h5f::H5F_libver_t; -#[cfg(all(hdf5_1_10_0, h5_have_parallel))] +#[cfg(all(feature = "1.10.0", feature = "have-parallel"))] use hdf5_sys::h5p::{ H5Pget_all_coll_metadata_ops, H5Pget_coll_metadata_write, H5Pset_all_coll_metadata_ops, H5Pset_coll_metadata_write, }; -#[cfg(hdf5_1_8_13)] +#[cfg(feature = "1.8.13")] use hdf5_sys::h5p::{H5Pget_core_write_tracking, H5Pset_core_write_tracking}; -#[cfg(hdf5_1_8_7)] +#[cfg(feature = "1.8.7")] use hdf5_sys::h5p::{H5Pget_elink_file_cache_size, H5Pset_elink_file_cache_size}; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] use hdf5_sys::h5p::{ H5Pget_evict_on_close, H5Pget_mdc_image_config, H5Pget_page_buffer_size, H5Pset_evict_on_close, H5Pset_mdc_image_config, H5Pset_page_buffer_size, }; -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] use hdf5_sys::h5p::{H5Pget_libver_bounds, H5Pset_libver_bounds}; -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] use hdf5_sys::h5p::{ H5Pget_mdc_log_options, H5Pget_metadata_read_attempts, H5Pset_mdc_log_options, H5Pset_metadata_read_attempts, }; -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] use crate::globals::H5FD_DIRECT; #[cfg(feature = "mpio")] use crate::globals::H5FD_MPIO; @@ -109,32 +109,31 @@ impl ObjectClass for FileAccess { impl Debug for FileAccess { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let _e = silence_errors(); let mut formatter = f.debug_struct("FileAccess"); formatter.field("alignment", &self.alignment()); 
formatter.field("chunk_cache", &self.chunk_cache()); formatter.field("fclose_degree", &self.fclose_degree()); formatter.field("gc_references", &self.gc_references()); formatter.field("small_data_block_size", &self.small_data_block_size()); - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] formatter.field("libver_bounds", &self.libver_bounds()); - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] formatter.field("elink_file_cache_size", &self.elink_file_cache_size()); formatter.field("meta_block_size", &self.meta_block_size()); - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] formatter.field("page_buffer_size", &self.page_buffer_size()); - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] formatter.field("evict_on_close", &self.evict_on_close()); - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] formatter.field("mdc_image_config", &self.mdc_image_config()); formatter.field("sieve_buf_size", &self.sieve_buf_size()); - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] formatter.field("metadata_read_attempts", &self.metadata_read_attempts()); - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] formatter.field("mdc_log_options", &self.mdc_log_options()); - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] formatter.field("all_coll_metadata_ops", &self.all_coll_metadata_ops()); - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] formatter.field("coll_metadata_write", &self.coll_metadata_write()); formatter.field("mdc_config", &self.mdc_config()); formatter.field("driver", &self.driver()); @@ -160,7 +159,7 @@ impl Eq for FileAccess {} impl Clone for FileAccess { fn clone(&self) -> Self { - unsafe { self.deref().clone().cast() } + unsafe { self.deref().clone().cast_unchecked() } } } @@ -168,7 +167,7 @@ impl Clone for FileAccess { pub struct CoreDriver { pub increment: usize, pub filebacked: bool, - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] pub write_tracking: 
usize, } @@ -177,7 +176,7 @@ impl Default for CoreDriver { Self { increment: 1024 * 1024, filebacked: false, - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] write_tracking: 0, } } @@ -224,19 +223,19 @@ bitflags! { } } -#[derive(Clone, Debug, PartialEq, Eq)] +impl Default for LogFlags { + fn default() -> Self { + Self::LOC_IO + } +} + +#[derive(Clone, Default, Debug, PartialEq, Eq)] pub struct LogOptions { logfile: Option, flags: LogFlags, buf_size: usize, } -impl Default for LogOptions { - fn default() -> Self { - Self { logfile: None, flags: LogFlags::LOC_IO, buf_size: 0 } - } -} - static FD_MEM_TYPES: &[H5F_mem_t] = &[ H5F_mem_t::H5FD_MEM_DEFAULT, H5F_mem_t::H5FD_MEM_SUPER, @@ -366,7 +365,7 @@ impl SplitDriver { mem_lheap: 0, mem_object: 0, }; - if cfg!(hdf5_1_8_10) { + if cfg!(feature = "1.8.10") { layout.mem_gheap = 1; // was changed in 1.8.10 } let is_split = drv.relax @@ -452,7 +451,7 @@ mod mpio { #[cfg(feature = "mpio")] pub use self::mpio::*; -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub struct DirectDriver { pub alignment: usize, @@ -460,7 +459,7 @@ pub struct DirectDriver { pub cbuf_size: usize, } -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] impl Default for DirectDriver { fn default() -> Self { Self { alignment: 4096, block_size: 4096, cbuf_size: 16 * 1024 * 1024 } @@ -478,7 +477,7 @@ pub enum FileDriver { Split(SplitDriver), #[cfg(feature = "mpio")] Mpio(MpioDriver), - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] Direct(DirectDriver), } @@ -507,13 +506,13 @@ impl From for FileCloseDegree { } } -impl Into for FileCloseDegree { - fn into(self) -> H5F_close_degree_t { - match self { - Self::Weak => H5F_close_degree_t::H5F_CLOSE_WEAK, - Self::Semi => H5F_close_degree_t::H5F_CLOSE_SEMI, - Self::Strong => H5F_close_degree_t::H5F_CLOSE_STRONG, - Self::Default => H5F_close_degree_t::H5F_CLOSE_DEFAULT, +impl From for H5F_close_degree_t { + fn from(v: FileCloseDegree) -> Self 
{ + match v { + FileCloseDegree::Weak => Self::H5F_CLOSE_WEAK, + FileCloseDegree::Semi => Self::H5F_CLOSE_SEMI, + FileCloseDegree::Strong => Self::H5F_CLOSE_STRONG, + FileCloseDegree::Default => Self::H5F_CLOSE_DEFAULT, } } } @@ -545,19 +544,13 @@ impl Default for ChunkCache { impl Eq for ChunkCache {} -#[derive(Clone, Copy, Debug, PartialEq, Eq)] +#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)] pub struct PageBufferSize { pub buf_size: usize, pub min_meta_perc: u32, pub min_raw_perc: u32, } -impl Default for PageBufferSize { - fn default() -> Self { - Self { buf_size: 0, min_meta_perc: 0, min_raw_perc: 0 } - } -} - #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum CacheIncreaseMode { Off, @@ -573,11 +566,11 @@ impl From for CacheIncreaseMode { } } -impl Into for CacheIncreaseMode { - fn into(self) -> H5C_cache_incr_mode { - match self { - Self::Threshold => H5C_cache_incr_mode::H5C_incr__threshold, - Self::Off => H5C_cache_incr_mode::H5C_incr__off, +impl From for H5C_cache_incr_mode { + fn from(v: CacheIncreaseMode) -> Self { + match v { + CacheIncreaseMode::Threshold => Self::H5C_incr__threshold, + CacheIncreaseMode::Off => Self::H5C_incr__off, } } } @@ -597,11 +590,11 @@ impl From for FlashIncreaseMode { } } -impl Into for FlashIncreaseMode { - fn into(self) -> H5C_cache_flash_incr_mode { - match self { - Self::AddSpace => H5C_cache_flash_incr_mode::H5C_flash_incr__add_space, - Self::Off => H5C_cache_flash_incr_mode::H5C_flash_incr__off, +impl From for H5C_cache_flash_incr_mode { + fn from(v: FlashIncreaseMode) -> Self { + match v { + FlashIncreaseMode::AddSpace => Self::H5C_flash_incr__add_space, + FlashIncreaseMode::Off => Self::H5C_flash_incr__off, } } } @@ -625,13 +618,13 @@ impl From for CacheDecreaseMode { } } -impl Into for CacheDecreaseMode { - fn into(self) -> H5C_cache_decr_mode { - match self { - Self::Threshold => H5C_cache_decr_mode::H5C_decr__threshold, - Self::AgeOut => H5C_cache_decr_mode::H5C_decr__age_out, - Self::AgeOutWithThreshold 
=> H5C_cache_decr_mode::H5C_decr__age_out_with_threshold, - Self::Off => H5C_cache_decr_mode::H5C_decr__off, +impl From for H5C_cache_decr_mode { + fn from(v: CacheDecreaseMode) -> Self { + match v { + CacheDecreaseMode::Threshold => Self::H5C_decr__threshold, + CacheDecreaseMode::AgeOut => Self::H5C_decr__age_out, + CacheDecreaseMode::AgeOutWithThreshold => Self::H5C_decr__age_out_with_threshold, + CacheDecreaseMode::Off => Self::H5C_decr__off, } } } @@ -657,11 +650,11 @@ impl From for MetadataWriteStrategy { } } -impl Into for MetadataWriteStrategy { - fn into(self) -> c_int { - match self { - Self::Distributed => H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED, - Self::ProcessZeroOnly => H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY, +impl From for c_int { + fn from(v: MetadataWriteStrategy) -> Self { + match v { + MetadataWriteStrategy::Distributed => H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED, + MetadataWriteStrategy::ProcessZeroOnly => H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY, } } } @@ -703,8 +696,8 @@ impl Eq for MetadataCacheConfig {} impl Default for MetadataCacheConfig { fn default() -> Self { - let min_clean_fraction = if cfg!(h5_have_parallel) { 0.3_f32 } else { 0.01_f32 }; - let flash_multiple = if cfg!(h5_have_parallel) { 1.0_f32 } else { 1.4_f32 }; + let min_clean_fraction = if cfg!(feature = "have-parallel") { 0.3_f32 } else { 0.01_f32 }; + let flash_multiple = if cfg!(feature = "have-parallel") { 1.0_f32 } else { 1.4_f32 }; Self { rpt_fcn_enabled: false, open_trace_file: false, @@ -739,42 +732,42 @@ impl Default for MetadataCacheConfig { } } -impl Into for MetadataCacheConfig { - fn into(self) -> H5AC_cache_config_t { +impl From for H5AC_cache_config_t { + fn from(v: MetadataCacheConfig) -> Self { const N: usize = H5AC__MAX_TRACE_FILE_NAME_LEN; let mut trace_file_name: [c_char; N + 1] = unsafe { mem::zeroed() }; - string_to_fixed_bytes(&self.trace_file_name, &mut trace_file_name[..N]); - H5AC_cache_config_t { + string_to_fixed_bytes(&v.trace_file_name, 
&mut trace_file_name[..N]); + Self { version: H5AC__CURR_CACHE_CONFIG_VERSION, - rpt_fcn_enabled: self.rpt_fcn_enabled as _, - open_trace_file: self.open_trace_file as _, - close_trace_file: self.close_trace_file as _, + rpt_fcn_enabled: hbool_t::from(v.rpt_fcn_enabled), + open_trace_file: hbool_t::from(v.open_trace_file), + close_trace_file: hbool_t::from(v.close_trace_file), trace_file_name, - evictions_enabled: self.evictions_enabled as _, - set_initial_size: self.set_initial_size as _, - initial_size: self.initial_size as _, - min_clean_fraction: self.min_clean_fraction as _, - max_size: self.max_size as _, - min_size: self.min_size as _, - epoch_length: self.epoch_length as _, - incr_mode: self.incr_mode.into(), - lower_hr_threshold: self.lower_hr_threshold as _, - increment: self.increment as _, - apply_max_increment: self.apply_max_increment as _, - max_increment: self.max_increment as _, - flash_incr_mode: self.flash_incr_mode.into(), - flash_multiple: self.flash_multiple as _, - flash_threshold: self.flash_threshold as _, - decr_mode: self.decr_mode.into(), - upper_hr_threshold: self.upper_hr_threshold as _, - decrement: self.decrement as _, - apply_max_decrement: self.apply_max_decrement as _, - max_decrement: self.max_decrement as _, - epochs_before_eviction: self.epochs_before_eviction as _, - apply_empty_reserve: self.apply_empty_reserve as _, - empty_reserve: self.empty_reserve as _, - dirty_bytes_threshold: self.dirty_bytes_threshold as _, - metadata_write_strategy: self.metadata_write_strategy.into(), + evictions_enabled: hbool_t::from(v.evictions_enabled), + set_initial_size: hbool_t::from(v.set_initial_size), + initial_size: v.initial_size as _, + min_clean_fraction: v.min_clean_fraction as _, + max_size: v.max_size as _, + min_size: v.min_size as _, + epoch_length: v.epoch_length as _, + incr_mode: v.incr_mode.into(), + lower_hr_threshold: v.lower_hr_threshold as _, + increment: v.increment as _, + apply_max_increment: 
hbool_t::from(v.apply_max_increment), + max_increment: v.max_increment as _, + flash_incr_mode: v.flash_incr_mode.into(), + flash_multiple: v.flash_multiple as _, + flash_threshold: v.flash_threshold as _, + decr_mode: v.decr_mode.into(), + upper_hr_threshold: v.upper_hr_threshold as _, + decrement: v.decrement as _, + apply_max_decrement: hbool_t::from(v.apply_max_decrement), + max_decrement: v.max_decrement as _, + epochs_before_eviction: v.epochs_before_eviction as _, + apply_empty_reserve: hbool_t::from(v.apply_empty_reserve), + empty_reserve: v.empty_reserve as _, + dirty_bytes_threshold: v.dirty_bytes_threshold as _, + metadata_write_strategy: v.metadata_write_strategy.into(), } } } @@ -817,7 +810,7 @@ impl From for MetadataCacheConfig { } } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] mod cache_image_config { use super::*; @@ -838,13 +831,13 @@ mod cache_image_config { } } - impl Into for CacheImageConfig { - fn into(self) -> H5AC_cache_image_config_t { - H5AC_cache_image_config_t { + impl From for H5AC_cache_image_config_t { + fn from(v: CacheImageConfig) -> Self { + Self { version: H5AC__CURR_CACHE_CONFIG_VERSION, - generate_image: self.generate_image as _, - save_resize_status: self.save_resize_status as _, - entry_ageout: self.entry_ageout as _, + generate_image: hbool_t::from(v.generate_image), + save_resize_status: hbool_t::from(v.save_resize_status), + entry_ageout: v.entry_ageout as _, } } } @@ -860,10 +853,10 @@ mod cache_image_config { } } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] pub use self::cache_image_config::*; -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] #[derive(Clone, Debug, PartialEq, Eq)] pub struct CacheLogOptions { pub is_enabled: bool, @@ -871,14 +864,14 @@ pub struct CacheLogOptions { pub start_on_access: bool, } -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] impl Default for CacheLogOptions { fn default() -> Self { Self { is_enabled: false, location: "".into(), start_on_access: false } } } -#[cfg(hdf5_1_10_2)] 
+#[cfg(feature = "1.10.2")] mod libver { use super::*; @@ -899,12 +892,12 @@ mod libver { } } - impl Into for LibraryVersion { - fn into(self) -> H5F_libver_t { - match self { - Self::V18 => H5F_libver_t::H5F_LIBVER_V18, - Self::V110 => H5F_libver_t::H5F_LIBVER_V110, - _ => H5F_libver_t::H5F_LIBVER_EARLIEST, + impl From for H5F_libver_t { + fn from(v: LibraryVersion) -> Self { + match v { + LibraryVersion::V18 => Self::H5F_LIBVER_V18, + LibraryVersion::V110 => Self::H5F_LIBVER_V110, + LibraryVersion::Earliest => Self::H5F_LIBVER_EARLIEST, } } } @@ -925,14 +918,26 @@ mod libver { pub high: LibraryVersion, } + impl LibVerBounds { + pub const fn new(low: LibraryVersion, high: LibraryVersion) -> Self { + Self { low, high } + } + } + impl Default for LibVerBounds { fn default() -> Self { Self { low: LibraryVersion::Earliest, high: LibraryVersion::latest() } } } + + impl From for LibVerBounds { + fn from(version: LibraryVersion) -> Self { + Self { low: version, high: LibraryVersion::latest() } + } + } } -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] pub use self::libver::*; /// Builder used to create file access property list. 
@@ -940,33 +945,33 @@ pub use self::libver::*; pub struct FileAccessBuilder { file_driver: Option, log_options: LogOptions, - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] write_tracking: Option, fclose_degree: Option, alignment: Option, chunk_cache: Option, - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] elink_file_cache_size: Option, meta_block_size: Option, - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] page_buffer_size: Option, sieve_buf_size: Option, - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] evict_on_close: Option, - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] metadata_read_attempts: Option, mdc_config: Option, - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] mdc_image_config: Option, - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] mdc_log_options: Option, - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] all_coll_metadata_ops: Option, - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] coll_metadata_write: Option, gc_references: Option, small_data_block_size: Option, - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] libver_bounds: Option, } @@ -988,17 +993,17 @@ impl FileAccessBuilder { builder.driver(&drv); builder.gc_references(plist.get_gc_references()?); builder.small_data_block_size(plist.get_small_data_block_size()?); - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] { let v = plist.get_libver_bounds()?; builder.libver_bounds(v.low, v.high); } - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] { builder.elink_file_cache_size(plist.get_elink_file_cache_size()?); } builder.meta_block_size(plist.get_meta_block_size()?); - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] { let v = plist.get_page_buffer_size()?; builder.page_buffer_size(v.buf_size, v.min_meta_perc, v.min_raw_perc); @@ -1006,19 +1011,19 @@ impl FileAccessBuilder { builder.mdc_image_config(plist.get_mdc_image_config()?.generate_image); } 
builder.sieve_buf_size(plist.get_sieve_buf_size()?); - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] { builder.metadata_read_attempts(plist.get_metadata_read_attempts()?); let v = plist.get_mdc_log_options()?; builder.mdc_log_options(v.is_enabled, &v.location, v.start_on_access); } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] { builder.all_coll_metadata_ops(plist.get_all_coll_metadata_ops()?); builder.coll_metadata_write(plist.get_coll_metadata_write()?); } builder.mdc_config(&plist.get_mdc_config()?); - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] { if let FileDriver::Core(ref drv) = drv { builder.write_tracking(drv.write_tracking); @@ -1027,6 +1032,11 @@ impl FileAccessBuilder { Ok(builder) } + /// Sets the file close degree + /// + /// If called with `FileCloseDegree::Strong`, the programmer is responsible + /// for closing all items before closing the file. Failure to do so might + /// invalidate newly created objects. 
pub fn fclose_degree(&mut self, fc_degree: FileCloseDegree) -> &mut Self { self.fclose_degree = Some(fc_degree); self @@ -1042,7 +1052,7 @@ impl FileAccessBuilder { self } - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] pub fn elink_file_cache_size(&mut self, efc_size: u32) -> &mut Self { self.elink_file_cache_size = Some(efc_size); self @@ -1053,7 +1063,7 @@ impl FileAccessBuilder { self } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn page_buffer_size( &mut self, buf_size: usize, min_meta_perc: u32, min_raw_perc: u32, ) -> &mut Self { @@ -1066,13 +1076,13 @@ impl FileAccessBuilder { self } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn evict_on_close(&mut self, evict_on_close: bool) -> &mut Self { self.evict_on_close = Some(evict_on_close); self } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn metadata_read_attempts(&mut self, attempts: u32) -> &mut Self { self.metadata_read_attempts = Some(attempts); self @@ -1083,7 +1093,7 @@ impl FileAccessBuilder { self } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn mdc_image_config(&mut self, generate_image: bool) -> &mut Self { self.mdc_image_config = Some(CacheImageConfig { generate_image, @@ -1093,7 +1103,7 @@ impl FileAccessBuilder { self } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn mdc_log_options( &mut self, is_enabled: bool, location: &str, start_on_access: bool, ) -> &mut Self { @@ -1102,13 +1112,13 @@ impl FileAccessBuilder { self } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn all_coll_metadata_ops(&mut self, is_collective: bool) -> &mut Self { self.all_coll_metadata_ops = Some(is_collective); self } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn coll_metadata_write(&mut self, is_collective: bool) -> &mut Self { self.coll_metadata_write = Some(is_collective); self @@ -1124,12 +1134,32 @@ impl FileAccessBuilder { self } - 
#[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] pub fn libver_bounds(&mut self, low: LibraryVersion, high: LibraryVersion) -> &mut Self { self.libver_bounds = Some(LibVerBounds { low, high }); self } + #[cfg(feature = "1.10.2")] + pub fn libver_earliest(&mut self) -> &mut Self { + self.libver_bounds(LibraryVersion::Earliest, LibraryVersion::latest()) + } + + #[cfg(feature = "1.10.2")] + pub fn libver_v18(&mut self) -> &mut Self { + self.libver_bounds(LibraryVersion::V18, LibraryVersion::latest()) + } + + #[cfg(feature = "1.10.2")] + pub fn libver_v110(&mut self) -> &mut Self { + self.libver_bounds(LibraryVersion::V110, LibraryVersion::latest()) + } + + #[cfg(feature = "1.10.2")] + pub fn libver_latest(&mut self) -> &mut Self { + self.libver_bounds(LibraryVersion::latest(), LibraryVersion::latest()) + } + pub fn driver(&mut self, file_driver: &FileDriver) -> &mut Self { self.file_driver = Some(file_driver.clone()); self @@ -1157,15 +1187,12 @@ impl FileAccessBuilder { } pub fn core_options(&mut self, increment: usize, filebacked: bool) -> &mut Self { - let mut drv = CoreDriver::default(); - drv.increment = increment; - drv.filebacked = filebacked; + let drv = CoreDriver { increment, filebacked, ..CoreDriver::default() }; self.driver(&FileDriver::Core(drv)) } pub fn core_filebacked(&mut self, filebacked: bool) -> &mut Self { - let mut drv = CoreDriver::default(); - drv.filebacked = filebacked; + let drv = CoreDriver { filebacked, ..CoreDriver::default() }; self.driver(&FileDriver::Core(drv)) } @@ -1173,7 +1200,7 @@ impl FileAccessBuilder { self.driver(&FileDriver::Core(CoreDriver::default())) } - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] pub fn write_tracking(&mut self, page_size: usize) -> &mut Self { self.write_tracking = Some(page_size); self @@ -1218,14 +1245,14 @@ impl FileAccessBuilder { self.driver(&FileDriver::Mpio(MpioDriver::try_new(comm, info).unwrap())) } - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] pub fn direct_options( &mut self, 
alignment: usize, block_size: usize, cbuf_size: usize, ) -> &mut Self { self.driver(&FileDriver::Direct(DirectDriver { alignment, block_size, cbuf_size })) } - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] pub fn direct(&mut self) -> &mut Self { self.driver(&FileDriver::Direct(DirectDriver::default())) } @@ -1244,11 +1271,15 @@ impl FileAccessBuilder { } fn set_core(&self, id: hid_t, drv: &CoreDriver) -> Result<()> { - h5try!(H5Pset_fapl_core(id, drv.increment as _, drv.filebacked as _)); - #[cfg(hdf5_1_8_13)] + h5try!(H5Pset_fapl_core(id, drv.increment as _, hbool_t::from(drv.filebacked))); + #[cfg(feature = "1.8.13")] { if let Some(page_size) = self.write_tracking { - h5try!(H5Pset_core_write_tracking(id, (page_size > 0) as _, page_size.max(1) as _)); + h5try!(H5Pset_core_write_tracking( + id, + hbool_t::from(page_size > 0), + page_size.max(1) as _ + )); } } Ok(()) @@ -1302,7 +1333,7 @@ impl FileAccessBuilder { memb_fapl.as_ptr(), memb_name.as_ptr(), memb_addr.as_ptr(), - drv.relax as _, + hbool_t::from(drv.relax), )); Ok(()) @@ -1327,7 +1358,7 @@ impl FileAccessBuilder { Ok(()) } - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] fn set_direct(id: hid_t, drv: &DirectDriver) -> Result<()> { h5try!(H5Pset_fapl_direct(id, drv.alignment as _, drv.block_size as _, drv.cbuf_size as _)); Ok(()) @@ -1360,7 +1391,7 @@ impl FileAccessBuilder { FileDriver::Mpio(drv) => { Self::set_mpio(id, drv)?; } - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] FileDriver::Direct(drv) => { Self::set_direct(id, drv)?; } @@ -1378,22 +1409,24 @@ impl FileAccessBuilder { if let Some(v) = self.chunk_cache { h5try!(H5Pset_cache(id, 0, v.nslots as _, v.nbytes as _, v.w0 as _)); } + // The default is to use CLOSE_SEMI or CLOSE_WEAK, depending on VFL driver. 
+ // Both of these are unproblematic for our ownership if let Some(v) = self.fclose_degree { h5try!(H5Pset_fclose_degree(id, v.into())); } if let Some(v) = self.gc_references { - h5try!(H5Pset_gc_references(id, v as _)); + h5try!(H5Pset_gc_references(id, c_uint::from(v))); } if let Some(v) = self.small_data_block_size { h5try!(H5Pset_small_data_block_size(id, v as _)); } - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] { if let Some(v) = self.libver_bounds { h5try!(H5Pset_libver_bounds(id, v.low.into(), v.high.into())); } } - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] { if let Some(v) = self.elink_file_cache_size { h5try!(H5Pset_elink_file_cache_size(id, v as _)); @@ -1402,7 +1435,7 @@ impl FileAccessBuilder { if let Some(v) = self.meta_block_size { h5try!(H5Pset_meta_block_size(id, v as _)); } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] { if let Some(v) = self.page_buffer_size { h5try!(H5Pset_page_buffer_size( @@ -1413,7 +1446,7 @@ impl FileAccessBuilder { )); } if let Some(v) = self.evict_on_close { - h5try!(H5Pset_evict_on_close(id, v as _)); + h5try!(H5Pset_evict_on_close(id, hbool_t::from(v))); } if let Some(v) = self.mdc_image_config { h5try!(H5Pset_mdc_image_config(id, &v.into() as *const _)); @@ -1422,7 +1455,7 @@ impl FileAccessBuilder { if let Some(v) = self.sieve_buf_size { h5try!(H5Pset_sieve_buf_size(id, v as _)); } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] { if let Some(v) = self.metadata_read_attempts { h5try!(H5Pset_metadata_read_attempts(id, v as _)); @@ -1431,13 +1464,13 @@ impl FileAccessBuilder { let location = to_cstring(v.location.as_ref())?; h5try!(H5Pset_mdc_log_options( id, - v.is_enabled as _, + hbool_t::from(v.is_enabled), location.as_ptr(), - v.start_on_access as _, + hbool_t::from(v.start_on_access), )); } } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] { if let Some(v) = self.all_coll_metadata_ops { h5try!(H5Pset_all_coll_metadata_ops(id, v as _)); @@ 
-1452,11 +1485,14 @@ impl FileAccessBuilder { Ok(()) } + pub fn apply(&self, plist: &mut FileAccess) -> Result<()> { + h5lock!(self.populate_plist(plist.id())) + } + pub fn finish(&self) -> Result { h5lock!({ - let plist = FileAccess::try_new()?; - self.populate_plist(plist.id())?; - Ok(plist) + let mut plist = FileAccess::try_new()?; + self.apply(&mut plist).map(|_| plist) }) } } @@ -1468,7 +1504,7 @@ impl FileAccess { } pub fn copy(&self) -> Self { - unsafe { self.deref().copy().cast() } + unsafe { self.deref().copy().cast_unchecked() } } pub fn build() -> FileAccessBuilder { @@ -1483,7 +1519,7 @@ impl FileAccess { h5try!(H5Pget_fapl_core(self.id(), &mut increment as *mut _, &mut filebacked as *mut _)); drv.increment = increment as _; drv.filebacked = filebacked > 0; - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] { let mut is_enabled: hbool_t = 0; let mut page_size: size_t = 0; @@ -1537,6 +1573,9 @@ impl FileAccess { } *layout.get_mut(i - 1) = 0xff - mapping[j]; } + for &memb_name in &memb_name { + crate::util::h5_free_memory(memb_name as *mut _); + } let relax = relax > 0; let drv = MultiDriver { files, layout, relax }; drv.validate().map(|_| drv) @@ -1552,7 +1591,7 @@ impl FileAccess { } #[doc(hidden)] - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] fn get_direct(&self) -> Result { let res = h5get!(H5Pget_fapl_direct(self.id()): size_t, size_t, size_t)?; Ok(DirectDriver { alignment: res.0 as _, block_size: res.1 as _, cbuf_size: res.2 as _ }) @@ -1567,7 +1606,7 @@ impl FileAccess { return self.get_mpio().map(FileDriver::Mpio); } } - #[cfg(h5_have_direct)] + #[cfg(feature = "have-direct")] { if drv_id == *H5FD_DIRECT { return self.get_direct().map(FileDriver::Direct); @@ -1585,11 +1624,8 @@ impl FileAccess { self.get_family().map(FileDriver::Family) } else if drv_id == *H5FD_MULTI { let multi = self.get_multi()?; - if let Some(split) = SplitDriver::from_multi(&multi) { - Ok(FileDriver::Split(split)) - } else { - Ok(FileDriver::Multi(multi)) - } + 
SplitDriver::from_multi(&multi) + .map_or(Ok(FileDriver::Multi(multi)), |split| Ok(FileDriver::Split(split))) } else { fail!("unknown or unsupported file driver (id: {})", drv_id); } @@ -1601,7 +1637,7 @@ impl FileAccess { #[doc(hidden)] pub fn get_fclose_degree(&self) -> Result { - h5get!(H5Pget_fclose_degree(self.id()): H5F_close_degree_t).map(|x| x.into()) + h5get!(H5Pget_fclose_degree(self.id()): H5F_close_degree_t).map(Into::into) } pub fn fclose_degree(&self) -> FileCloseDegree { @@ -1634,13 +1670,13 @@ impl FileAccess { self.get_chunk_cache().unwrap_or_else(|_| ChunkCache::default()) } - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] #[doc(hidden)] pub fn get_elink_file_cache_size(&self) -> Result { h5get!(H5Pget_elink_file_cache_size(self.id()): c_uint).map(|x| x as _) } - #[cfg(hdf5_1_8_7)] + #[cfg(feature = "1.8.7")] pub fn elink_file_cache_size(&self) -> u32 { self.get_elink_file_cache_size().unwrap_or(0) } @@ -1654,7 +1690,7 @@ impl FileAccess { self.get_meta_block_size().unwrap_or(2048) } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] #[doc(hidden)] pub fn get_page_buffer_size(&self) -> Result { h5get!(H5Pget_page_buffer_size(self.id()): size_t, c_uint, c_uint).map( @@ -1666,7 +1702,7 @@ impl FileAccess { ) } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn page_buffer_size(&self) -> PageBufferSize { self.get_page_buffer_size().unwrap_or_else(|_| PageBufferSize::default()) } @@ -1680,24 +1716,24 @@ impl FileAccess { self.get_sieve_buf_size().unwrap_or(64 * 1024) } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] #[doc(hidden)] pub fn get_evict_on_close(&self) -> Result { h5get!(H5Pget_evict_on_close(self.id()): hbool_t).map(|x| x > 0) } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn evict_on_close(&self) -> bool { self.get_evict_on_close().unwrap_or(false) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] #[doc(hidden)] pub fn get_metadata_read_attempts(&self) -> Result { h5get!(H5Pget_metadata_read_attempts(self.id()): 
c_uint).map(|x| x as _) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn metadata_read_attempts(&self) -> u32 { self.get_metadata_read_attempts().unwrap_or(1) } @@ -1710,10 +1746,10 @@ impl FileAccess { } pub fn mdc_config(&self) -> MetadataCacheConfig { - self.get_mdc_config().ok().unwrap_or_else(MetadataCacheConfig::default) + self.get_mdc_config().ok().unwrap_or_default() } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] #[doc(hidden)] pub fn get_mdc_image_config(&self) -> Result { let mut config: H5AC_cache_image_config_t = unsafe { mem::zeroed() }; @@ -1721,12 +1757,12 @@ impl FileAccess { h5call!(H5Pget_mdc_image_config(self.id(), &mut config)).map(|_| config.into()) } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn mdc_image_config(&self) -> CacheImageConfig { - self.get_mdc_image_config().ok().unwrap_or_else(CacheImageConfig::default) + self.get_mdc_image_config().ok().unwrap_or_default() } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] #[doc(hidden)] pub fn get_mdc_log_options(&self) -> Result { let mut is_enabled: hbool_t = 0; @@ -1754,29 +1790,29 @@ impl FileAccess { }) } - #[cfg(hdf5_1_10_0)] + #[cfg(feature = "1.10.0")] pub fn mdc_log_options(&self) -> CacheLogOptions { - self.get_mdc_log_options().ok().unwrap_or_else(CacheLogOptions::default) + self.get_mdc_log_options().ok().unwrap_or_default() } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] #[doc(hidden)] pub fn get_all_coll_metadata_ops(&self) -> Result { h5get!(H5Pget_all_coll_metadata_ops(self.id()): hbool_t).map(|x| x > 0) } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn all_coll_metadata_ops(&self) -> bool { self.get_all_coll_metadata_ops().unwrap_or(false) } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] #[doc(hidden)] pub fn get_coll_metadata_write(&self) -> Result { 
h5get!(H5Pget_coll_metadata_write(self.id()): hbool_t).map(|x| x > 0) } - #[cfg(all(hdf5_1_10_0, h5_have_parallel))] + #[cfg(all(feature = "1.10.0", feature = "have-parallel"))] pub fn coll_metadata_write(&self) -> bool { self.get_coll_metadata_write().unwrap_or(false) } @@ -1799,15 +1835,20 @@ impl FileAccess { self.get_small_data_block_size().unwrap_or(2048) } - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] #[doc(hidden)] pub fn get_libver_bounds(&self) -> Result { h5get!(H5Pget_libver_bounds(self.id()): H5F_libver_t, H5F_libver_t) .map(|(low, high)| LibVerBounds { low: low.into(), high: high.into() }) } - #[cfg(hdf5_1_10_2)] + #[cfg(feature = "1.10.2")] pub fn libver_bounds(&self) -> LibVerBounds { - self.get_libver_bounds().ok().unwrap_or_else(LibVerBounds::default) + self.get_libver_bounds().ok().unwrap_or_default() + } + + #[cfg(feature = "1.10.2")] + pub fn libver(&self) -> LibraryVersion { + self.get_libver_bounds().ok().unwrap_or_default().low } } diff --git a/src/hl/plist/file_create.rs b/src/hl/plist/file_create.rs index 7cc6be691..6d6cce73a 100644 --- a/src/hl/plist/file_create.rs +++ b/src/hl/plist/file_create.rs @@ -5,25 +5,28 @@ use std::ops::Deref; use bitflags::bitflags; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] use hdf5_sys::h5f::H5F_fspace_strategy_t; use hdf5_sys::h5o::{ H5O_SHMESG_ALL_FLAG, H5O_SHMESG_ATTR_FLAG, H5O_SHMESG_DTYPE_FLAG, H5O_SHMESG_FILL_FLAG, H5O_SHMESG_NONE_FLAG, H5O_SHMESG_PLINE_FLAG, H5O_SHMESG_SDSPACE_FLAG, }; use hdf5_sys::h5p::{ - H5Pcreate, H5Pget_istore_k, H5Pget_shared_mesg_index, H5Pget_shared_mesg_nindexes, - H5Pget_shared_mesg_phase_change, H5Pget_sizes, H5Pget_sym_k, H5Pget_userblock, H5Pset_istore_k, + H5Pcreate, H5Pget_attr_creation_order, H5Pget_attr_phase_change, H5Pget_istore_k, + H5Pget_obj_track_times, H5Pget_shared_mesg_index, H5Pget_shared_mesg_nindexes, + H5Pget_shared_mesg_phase_change, H5Pget_sizes, H5Pget_sym_k, H5Pget_userblock, + H5Pset_attr_creation_order, H5Pset_attr_phase_change, 
H5Pset_istore_k, H5Pset_obj_track_times, H5Pset_shared_mesg_index, H5Pset_shared_mesg_nindexes, H5Pset_shared_mesg_phase_change, H5Pset_sym_k, H5Pset_userblock, }; -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] use hdf5_sys::h5p::{ H5Pget_file_space_page_size, H5Pget_file_space_strategy, H5Pset_file_space_page_size, H5Pset_file_space_strategy, }; use crate::globals::H5P_FILE_CREATE; +pub use crate::hl::plist::common::{AttrCreationOrder, AttrPhaseChange}; use crate::internal_prelude::*; /// File creation properties. @@ -54,20 +57,20 @@ impl ObjectClass for FileCreate { impl Debug for FileCreate { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let _e = silence_errors(); let mut formatter = f.debug_struct("FileCreate"); - formatter - .field("userblock", &self.userblock()) - .field("sizes", &self.sizes()) - .field("sym_k", &self.sym_k()) - .field("istore_k", &self.istore_k()) - .field("shared_mesg_phase_change", &self.shared_mesg_phase_change()) - .field("shared_mesg_indexes", &self.shared_mesg_indexes()); - #[cfg(hdf5_1_10_1)] + formatter.field("userblock", &self.userblock()); + formatter.field("sizes", &self.sizes()); + formatter.field("sym_k", &self.sym_k()); + formatter.field("istore_k", &self.istore_k()); + formatter.field("shared_mesg_phase_change", &self.shared_mesg_phase_change()); + formatter.field("shared_mesg_indexes", &self.shared_mesg_indexes()); + formatter.field("obj_track_times", &self.obj_track_times()); + formatter.field("attr_phase_change", &self.attr_phase_change()); + formatter.field("attr_creation_order", &self.attr_creation_order()); + #[cfg(feature = "1.10.1")] { - formatter - .field("file_space_page_size", &self.file_space_page_size()) - .field("file_space_strategy", &self.file_space_strategy()); + formatter.field("file_space_page_size", &self.file_space_page_size()); + formatter.field("file_space_strategy", &self.file_space_strategy()); } formatter.finish() } @@ -185,7 +188,7 @@ pub struct SharedMessageIndex { } /// File space 
handling strategy. -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] pub enum FileSpaceStrategy { /// Mechanisms used: free-space managers, aggregators or embedded paged @@ -204,7 +207,7 @@ pub enum FileSpaceStrategy { None, } -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] impl Default for FileSpaceStrategy { fn default() -> Self { Self::FreeSpaceManager { paged: false, persist: false, threshold: 1 } @@ -219,9 +222,12 @@ pub struct FileCreateBuilder { istore_k: Option, shared_mesg_phase_change: Option, shared_mesg_indexes: Option>, - #[cfg(hdf5_1_10_1)] + obj_track_times: Option, + attr_phase_change: Option, + attr_creation_order: Option, + #[cfg(feature = "1.10.1")] file_space_page_size: Option, - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] file_space_strategy: Option, } @@ -241,7 +247,11 @@ impl FileCreateBuilder { let v = plist.get_shared_mesg_phase_change()?; builder.shared_mesg_phase_change(v.max_list, v.min_btree); builder.shared_mesg_indexes(&plist.get_shared_mesg_indexes()?); - #[cfg(hdf5_1_10_1)] + builder.obj_track_times(plist.get_obj_track_times()?); + let apc = plist.get_attr_phase_change()?; + builder.attr_phase_change(apc.max_compact, apc.min_dense); + builder.attr_creation_order(plist.get_attr_creation_order()?); + #[cfg(feature = "1.10.1")] { builder.file_space_page_size(plist.get_file_space_page_size()?); builder.file_space_strategy(plist.get_file_space_strategy()?); @@ -305,7 +315,31 @@ impl FileCreateBuilder { self } - #[cfg(hdf5_1_10_1)] + /// Sets a property that governs the recording of times associated with an object. + /// + /// If true, time data will be recorded; if false, time data will not be recorded. + pub fn obj_track_times(&mut self, track_times: bool) -> &mut Self { + self.obj_track_times = Some(track_times); + self + } + + /// Sets attribute storage phase change thresholds. + /// + /// For further details, see [`AttrPhaseChange`](enum.AttrPhaseChange.html). 
+ pub fn attr_phase_change(&mut self, max_compact: u32, min_dense: u32) -> &mut Self { + self.attr_phase_change = Some(AttrPhaseChange { max_compact, min_dense }); + self + } + + /// Sets flags for tracking and indexing attribute creation order. + /// + /// For further details, see [`AttrCreationOrder`](struct.AttrCreationOrder.html). + pub fn attr_creation_order(&mut self, attr_creation_order: AttrCreationOrder) -> &mut Self { + self.attr_creation_order = Some(attr_creation_order); + self + } + + #[cfg(feature = "1.10.1")] /// Sets the file space page size. /// /// The minimum size is 512. Setting a value less than 512 will result in @@ -316,7 +350,7 @@ impl FileCreateBuilder { self } - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] /// Sets the file space handling strategy and persisting free-space values. /// /// This setting cannot be changed for the life of the file. @@ -351,7 +385,16 @@ impl FileCreateBuilder { )); } } - #[cfg(hdf5_1_10_1)] + if let Some(v) = self.obj_track_times { + h5try!(H5Pset_obj_track_times(id, hbool_t::from(v))); + } + if let Some(v) = self.attr_phase_change { + h5try!(H5Pset_attr_phase_change(id, v.max_compact as _, v.min_dense as _)); + } + if let Some(v) = self.attr_creation_order { + h5try!(H5Pset_attr_creation_order(id, v.bits() as _)); + } + #[cfg(feature = "1.10.1")] { if let Some(v) = self.file_space_page_size { h5try!(H5Pset_file_space_page_size(id, v as _)); @@ -364,12 +407,14 @@ impl FileCreateBuilder { } else { H5F_fspace_strategy_t::H5F_FSPACE_STRATEGY_FSM_AGGR }; - (strategy, persist as _, threshold as _) + (strategy, hbool_t::from(persist), threshold) } FileSpaceStrategy::PageAggregation => { (H5F_fspace_strategy_t::H5F_FSPACE_STRATEGY_AGGR, 0, 0) } - _ => (H5F_fspace_strategy_t::H5F_FSPACE_STRATEGY_NONE, 0, 0), + FileSpaceStrategy::None => { + (H5F_fspace_strategy_t::H5F_FSPACE_STRATEGY_NONE, 0, 0) + } }; h5try!(H5Pset_file_space_strategy(id, strategy, persist, threshold)); } @@ -377,11 +422,14 @@ impl 
FileCreateBuilder { Ok(()) } + pub fn apply(&self, plist: &mut FileCreate) -> Result<()> { + h5lock!(self.populate_plist(plist.id())) + } + pub fn finish(&self) -> Result { h5lock!({ - let plist = FileCreate::try_new()?; - self.populate_plist(plist.id())?; - Ok(plist) + let mut plist = FileCreate::try_new()?; + self.apply(&mut plist).map(|_| plist) }) } } @@ -393,7 +441,7 @@ impl FileCreate { } pub fn copy(&self) -> Self { - unsafe { self.deref().copy().cast() } + unsafe { self.deref().copy().cast_unchecked() } } pub fn build() -> FileCreateBuilder { @@ -450,13 +498,13 @@ impl FileCreate { } #[doc(hidden)] - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn get_file_space_page_size(&self) -> Result { h5get!(H5Pget_file_space_page_size(self.id()): hsize_t).map(|x| x as _) } #[doc(hidden)] - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn get_file_space_strategy(&self) -> Result { let (strategy, persist, threshold) = h5get!(H5Pget_file_space_strategy(self.id()): H5F_fspace_strategy_t, hbool_t, hsize_t)?; @@ -511,14 +559,46 @@ impl FileCreate { self.get_shared_mesg_indexes().unwrap_or_else(|_| Vec::new()) } + #[doc(hidden)] + pub fn get_obj_track_times(&self) -> Result { + h5get!(H5Pget_obj_track_times(self.id()): hbool_t).map(|x| x > 0) + } + + /// Returns true if the time data is recorded. + pub fn obj_track_times(&self) -> bool { + self.get_obj_track_times().unwrap_or(true) + } + + #[doc(hidden)] + pub fn get_attr_phase_change(&self) -> Result { + h5get!(H5Pget_attr_phase_change(self.id()): c_uint, c_uint) + .map(|(mc, md)| AttrPhaseChange { max_compact: mc as _, min_dense: md as _ }) + } + + /// Returns attribute storage phase change thresholds. 
+ pub fn attr_phase_change(&self) -> AttrPhaseChange { + self.get_attr_phase_change().unwrap_or_default() + } + + #[doc(hidden)] + pub fn get_attr_creation_order(&self) -> Result { + h5get!(H5Pget_attr_creation_order(self.id()): c_uint) + .map(AttrCreationOrder::from_bits_truncate) + } + + /// Returns flags for tracking and indexing attribute creation order. + pub fn attr_creation_order(&self) -> AttrCreationOrder { + self.get_attr_creation_order().unwrap_or_default() + } + /// Retrieves the file space page size. - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn file_space_page_size(&self) -> u64 { self.get_file_space_page_size().unwrap_or(0) } /// Retrieves the file space handling strategy. - #[cfg(hdf5_1_10_1)] + #[cfg(feature = "1.10.1")] pub fn file_space_strategy(&self) -> FileSpaceStrategy { self.get_file_space_strategy().unwrap_or_else(|_| FileSpaceStrategy::default()) } diff --git a/src/hl/plist/link_create.rs b/src/hl/plist/link_create.rs new file mode 100644 index 000000000..3f1f32716 --- /dev/null +++ b/src/hl/plist/link_create.rs @@ -0,0 +1,169 @@ +//! Link create properties. + +use std::fmt::{self, Debug}; +use std::ops::Deref; + +use hdf5_sys::h5p::{ + H5Pcreate, H5Pget_char_encoding, H5Pget_create_intermediate_group, H5Pset_char_encoding, + H5Pset_create_intermediate_group, +}; +use hdf5_sys::h5t::{H5T_cset_t, H5T_CSET_ASCII, H5T_CSET_UTF8}; + +use crate::globals::H5P_LINK_CREATE; +use crate::internal_prelude::*; + +/// Link create properties. 
+#[repr(transparent)] +pub struct LinkCreate(Handle); + +impl ObjectClass for LinkCreate { + const NAME: &'static str = "link create property list"; + const VALID_TYPES: &'static [H5I_type_t] = &[H5I_GENPROP_LST]; + + fn from_handle(handle: Handle) -> Self { + Self(handle) + } + + fn handle(&self) -> &Handle { + &self.0 + } + + fn validate(&self) -> Result<()> { + let class = self.class()?; + if class != PropertyListClass::LinkCreate { + fail!("expected link create property list, got {:?}", class); + } + Ok(()) + } +} + +impl Debug for LinkCreate { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let mut formatter = f.debug_struct("LinkCreate"); + formatter.field("create_intermediate_group", &self.create_intermediate_group()); + formatter.field("char_encoding", &self.char_encoding()); + formatter.finish() + } +} + +impl Deref for LinkCreate { + type Target = PropertyList; + + fn deref(&self) -> &PropertyList { + unsafe { self.transmute() } + } +} + +impl PartialEq for LinkCreate { + fn eq(&self, other: &Self) -> bool { + ::eq(self, other) + } +} + +impl Eq for LinkCreate {} + +impl Clone for LinkCreate { + fn clone(&self) -> Self { + unsafe { self.deref().clone().cast_unchecked() } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum CharEncoding { + Ascii, + Utf8, +} + +/// Builder used to create link create property list. +#[derive(Clone, Debug, Default)] +pub struct LinkCreateBuilder { + create_intermediate_group: Option, + char_encoding: Option, +} + +impl LinkCreateBuilder { + /// Creates a new link create property list builder. + pub fn new() -> Self { + Self::default() + } + + /// Creates a new builder from an existing property list. 
+ pub fn from_plist(plist: &LinkCreate) -> Result { + let mut builder = Self::default(); + builder.create_intermediate_group(plist.get_create_intermediate_group()?); + builder.char_encoding(plist.get_char_encoding()?); + Ok(builder) + } + + pub fn create_intermediate_group(&mut self, create: bool) -> &mut Self { + self.create_intermediate_group = Some(create); + self + } + + pub fn char_encoding(&mut self, encoding: CharEncoding) -> &mut Self { + self.char_encoding = Some(encoding); + self + } + + fn populate_plist(&self, id: hid_t) -> Result<()> { + if let Some(create) = self.create_intermediate_group { + h5try!(H5Pset_create_intermediate_group(id, c_uint::from(create))); + } + if let Some(encoding) = self.char_encoding { + let encoding = match encoding { + CharEncoding::Ascii => H5T_CSET_ASCII, + CharEncoding::Utf8 => H5T_CSET_UTF8, + }; + h5try!(H5Pset_char_encoding(id, encoding)); + } + Ok(()) + } + + pub fn apply(&self, plist: &mut LinkCreate) -> Result<()> { + h5lock!(self.populate_plist(plist.id())) + } + + pub fn finish(&self) -> Result { + h5lock!({ + let mut plist = LinkCreate::try_new()?; + self.apply(&mut plist).map(|_| plist) + }) + } +} + +/// Link create property list. +impl LinkCreate { + pub fn try_new() -> Result { + Self::from_id(h5try!(H5Pcreate(*H5P_LINK_CREATE))) + } + + pub fn copy(&self) -> Self { + unsafe { self.deref().copy().cast_unchecked() } + } + + pub fn build() -> LinkCreateBuilder { + LinkCreateBuilder::new() + } + + #[doc(hidden)] + pub fn get_create_intermediate_group(&self) -> Result { + h5get!(H5Pget_create_intermediate_group(self.id()): c_uint).map(|x| x > 0) + } + + pub fn create_intermediate_group(&self) -> bool { + self.get_create_intermediate_group().unwrap_or(false) + } + + #[doc(hidden)] + pub fn get_char_encoding(&self) -> Result { + Ok(match h5get!(H5Pget_char_encoding(self.id()): H5T_cset_t)? 
{ + H5T_CSET_ASCII => CharEncoding::Ascii, + H5T_CSET_UTF8 => CharEncoding::Utf8, + encoding => fail!("Unknown char encoding: {:?}", encoding), + }) + } + + pub fn char_encoding(&self) -> CharEncoding { + self.get_char_encoding().unwrap_or(CharEncoding::Ascii) + } +} diff --git a/src/hl/selection.rs b/src/hl/selection.rs new file mode 100644 index 000000000..9f074aca4 --- /dev/null +++ b/src/hl/selection.rs @@ -0,0 +1,1636 @@ +use std::borrow::Cow; +use std::convert::{TryFrom, TryInto}; +use std::fmt::{self, Display}; +use std::mem; +use std::ops::{Deref, Range, RangeFrom, RangeFull, RangeInclusive, RangeTo, RangeToInclusive}; +use std::slice; + +use ndarray::{self, s, Array1, Array2, ArrayView1, ArrayView2}; + +use hdf5_sys::h5s::{ + H5S_sel_type, H5Sget_select_elem_npoints, H5Sget_select_elem_pointlist, H5Sget_select_type, + H5Sget_simple_extent_ndims, H5Sselect_all, H5Sselect_elements, H5Sselect_hyperslab, + H5Sselect_none, H5S_SELECT_SET, H5S_UNLIMITED, +}; +#[cfg(feature = "1.10.0")] +use hdf5_sys::h5s::{H5Sget_regular_hyperslab, H5Sis_regular_hyperslab}; + +use crate::hl::extents::Ix; +use crate::internal_prelude::*; + +unsafe fn get_points_selection(space_id: hid_t) -> Result> { + let npoints = h5check(H5Sget_select_elem_npoints(space_id))? as usize; + let ndim = h5check(H5Sget_simple_extent_ndims(space_id))? 
as usize; + let mut coords = vec![0; npoints * ndim]; + h5check(H5Sget_select_elem_pointlist(space_id, 0, npoints as _, coords.as_mut_ptr()))?; + let coords = if mem::size_of::() == mem::size_of::() { + mem::transmute(coords) + } else { + coords.iter().map(|&x| x as _).collect() + }; + Ok(Array2::from_shape_vec_unchecked((npoints, ndim), coords)) +} + +unsafe fn set_points_selection(space_id: hid_t, coords: ArrayView2) -> Result<()> { + let nelem = coords.shape()[0] as _; + let same_size = mem::size_of::() == mem::size_of::(); + let coords = match (coords.as_slice(), same_size) { + (Some(coords), true) => { + Cow::Borrowed(slice::from_raw_parts(coords.as_ptr().cast(), coords.len())) + } + _ => Cow::Owned(coords.iter().map(|&x| x as _).collect()), + }; + h5check(H5Sselect_elements(space_id, H5S_SELECT_SET, nelem, coords.as_ptr()))?; + Ok(()) +} + +#[cfg_attr(not(feature = "1.10.0"), allow(unused))] +unsafe fn get_regular_hyperslab(space_id: hid_t) -> Result> { + #[cfg(feature = "1.10.0")] + { + if h5check(H5Sis_regular_hyperslab(space_id))? <= 0 { + return Ok(None); + } + let ndim = h5check(H5Sget_simple_extent_ndims(space_id))? 
as usize; + let (mut start, mut stride, mut count, mut block) = + (vec![0; ndim], vec![0; ndim], vec![0; ndim], vec![0; ndim]); + h5check(H5Sget_regular_hyperslab( + space_id, + start.as_mut_ptr(), + stride.as_mut_ptr(), + count.as_mut_ptr(), + block.as_mut_ptr(), + ))?; + let mut hyper = vec![]; + for i in 0..ndim { + hyper.push(RawSlice { + start: start[i] as _, + step: stride[i] as _, + count: if count[i] == H5S_UNLIMITED { None } else { Some(count[i] as _) }, + block: block[i] as _, + }); + } + return Ok(Some(hyper.into())); + } + #[allow(unreachable_code)] + Ok(None) +} + +unsafe fn set_regular_hyperslab(space_id: hid_t, hyper: &RawHyperslab) -> Result<()> { + let (mut start, mut stride, mut count, mut block) = (vec![], vec![], vec![], vec![]); + for slice_info in hyper.iter() { + start.push(slice_info.start as _); + stride.push(slice_info.step as _); + count.push(slice_info.count.map_or(H5S_UNLIMITED, |x| x as _)); + block.push(slice_info.block as _); + } + h5check(H5Sselect_hyperslab( + space_id, + H5S_SELECT_SET, + start.as_ptr(), + stride.as_ptr(), + count.as_ptr(), + block.as_ptr(), + ))?; + Ok(()) +} + +fn check_coords(coords: &Array2, shape: &[Ix]) -> Result<()> { + if coords.shape() == [0, 0] { + return Ok(()); + } + let ndim = coords.shape()[1]; + ensure!(ndim == shape.len(), "Slice ndim ({}) != shape ndim ({})", ndim, shape.len()); + for (i, &dim) in shape.iter().enumerate() { + for &d in coords.slice(s![.., i]).iter() { + ensure!(d < dim, "Index {} out of bounds for axis {} with size {}", d, i, dim); + } + } + Ok(()) +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub struct RawSlice { + pub start: Ix, + pub step: Ix, + pub count: Option, + pub block: Ix, +} + +impl RawSlice { + pub fn new(start: Ix, step: Ix, count: Option, block: Ix) -> Self { + Self { start, step, count, block } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct RawHyperslab { + dims: Vec, +} + +impl Deref for RawHyperslab { + type Target = [RawSlice]; + + fn 
deref(&self) -> &Self::Target { + &self.dims + } +} + +impl RawHyperslab { + fn is_none(&self) -> bool { + self.iter().any(|s| s.count == Some(0)) + } + + fn is_all(&self, shape: &[Ix]) -> bool { + if self.is_empty() { + return true; + } + for (slice, &dim) in self.iter().zip(shape) { + let count = match slice.count { + Some(count) => count, + None => return false, + }; + if slice.start != 0 || slice.step != slice.block || count * slice.block != dim { + return false; + } + } + true + } +} + +impl From> for RawHyperslab { + fn from(dims: Vec) -> Self { + Self { dims } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum RawSelection { + None, + All, + Points(Array2), + RegularHyperslab(RawHyperslab), + ComplexHyperslab, +} + +impl Default for RawSelection { + fn default() -> Self { + Self::All + } +} + +impl From for RawSelection { + fn from(hyper: RawHyperslab) -> Self { + Self::RegularHyperslab(hyper) + } +} + +impl From> for RawSelection { + fn from(dims: Vec) -> Self { + Self::RegularHyperslab(dims.into()) + } +} + +impl RawSelection { + pub unsafe fn apply_to_dataspace(&self, space_id: hid_t) -> Result<()> { + match self { + Self::None => { + h5check(H5Sselect_none(space_id))?; + } + Self::All => { + h5check(H5Sselect_all(space_id))?; + } + Self::Points(ref coords) => set_points_selection(space_id, coords.view())?, + Self::RegularHyperslab(ref hyper) => set_regular_hyperslab(space_id, hyper)?, + Self::ComplexHyperslab => fail!("Complex hyperslabs are not supported"), + }; + Ok(()) + } + + pub unsafe fn extract_from_dataspace(space_id: hid_t) -> Result { + Ok(match H5Sget_select_type(space_id) { + H5S_sel_type::H5S_SEL_NONE => Self::None, + H5S_sel_type::H5S_SEL_ALL => Self::All, + H5S_sel_type::H5S_SEL_POINTS => Self::Points(get_points_selection(space_id)?), + H5S_sel_type::H5S_SEL_HYPERSLABS => get_regular_hyperslab(space_id)? 
+ .map_or(Self::ComplexHyperslab, Self::RegularHyperslab), + sel_type => fail!("Invalid selection type: {:?}", sel_type as c_int), + }) + } +} + +/// A selector of a one-dimensional array +/// +/// The following examples will use an array of 11 elements +/// to illustrate the various selections. The active elements +/// are marked with an `s`. +/// ```text +/// // An array of 11 elements +/// x x x x x x x x x x x +/// ``` +/// +/// ```text +/// Index(4) +/// _ _ _ _ s _ _ _ _ _ _ +/// ``` +/// ```text +/// Slice { start: 0, step: 3, end: 4, block: 1 } +/// s _ _ s _ _ _ _ _ _ _ +/// ``` +/// ```text +/// SliceTo { start: 2, step: 3, end: 8, block: 1 } +/// _ _ s _ _ s _ _ _ _ _ +/// ``` +/// ```text +/// SliceCount { start: 1, step: 3, count: 2, block: 1 } +/// _ s _ _ s _ _ s _ _ _ +/// ``` +/// ```text +/// Unlimited { start: 0, step: 3, block: 1 } +/// s _ _ s _ _ s _ _ s _ +/// ``` +/// ```text +/// Unlimited { start: 2, step: 3, block: 1 } +/// _ _ s _ _ s _ _ s _ _ +/// ``` +/// ```text +/// Unlimited { start: 0, step: 4, block: 2 } +/// s s _ _ s s _ _ s s _ +/// ``` +/// +/// See also [`this hdf5 tutorial`](https://support.hdfgroup.org/HDF5/Tutor/select.html) +/// for more information on hyperslab selections. 
+#[derive(Clone, Copy, Debug, Eq)] +pub enum SliceOrIndex { + /// A single index + Index(Ix), + /// Up to the given index + SliceTo { start: Ix, step: Ix, end: Ix, block: Ix }, + /// The given count of elements + SliceCount { start: Ix, step: Ix, count: Ix, block: Ix }, + /// An unlimited hyperslab + Unlimited { start: Ix, step: Ix, block: Ix }, +} + +impl PartialEq for SliceOrIndex { + fn eq(&self, other: &Self) -> bool { + use SliceOrIndex::{Index, SliceCount, SliceTo, Unlimited}; + match (self, other) { + (Index(s), Index(o)) => s == o, + ( + SliceTo { start: sstart, step: sstep, end: send, block: sblock }, + SliceTo { start: ostart, step: ostep, end: oend, block: oblock }, + ) => (sstart == ostart) & (sstep == ostep) & (send == oend) & (sblock == oblock), + ( + SliceCount { start: sstart, step: sstep, count: scount, block: sblock }, + SliceCount { start: ostart, step: ostep, count: ocount, block: oblock }, + ) => (sstart == ostart) & (sstep == ostep) & (scount == ocount) & (sblock == oblock), + ( + Unlimited { start: sstart, step: sstep, block: sblock }, + Unlimited { start: ostart, step: ostep, block: oblock }, + ) => (sstart == ostart) & (sstep == ostep) & (sblock == oblock), + ( + SliceTo { start: sstart, step: sstep, end: _, block: sblock }, + SliceCount { start: ostart, step: ostep, count: ocount, block: oblock }, + ) => { + if (sstart != ostart) | (sstep != ostep) | (sblock != oblock) { + return false; + } + self.count().unwrap() == *ocount + } + (SliceCount { .. }, SliceTo { .. }) => other == self, + _ => false, + } + } +} + +impl SliceOrIndex { + pub fn to_unlimited(self) -> Result { + Ok(match self { + Self::Index(_) => fail!("Cannot make index selection unlimited"), + Self::SliceTo { start, step, block, .. } + | Self::SliceCount { start, step, block, .. 
} + | Self::Unlimited { start, step, block } => Self::Unlimited { start, step, block }, + }) + } + + pub fn is_index(self) -> bool { + matches!(self, Self::Index(_)) + } + + pub fn is_slice(self) -> bool { + matches!(self, Self::SliceTo { .. } | Self::SliceCount { .. } | Self::Unlimited { .. }) + } + + pub fn is_unlimited(self) -> bool { + matches!(self, Self::Unlimited { .. }) + } + + fn set_blocksize(self, blocksize: Ix) -> Result { + Ok(match self { + Self::Index(_) => fail!("Cannot set blocksize for index selection"), + Self::SliceTo { start, step, end, .. } => { + Self::SliceTo { start, step, end, block: blocksize } + } + Self::SliceCount { start, step, count, .. } => { + Self::SliceCount { start, step, count, block: blocksize } + } + Self::Unlimited { start, step, .. } => { + Self::Unlimited { start, step, block: blocksize } + } + }) + } + + /// Number of elements contained in the `SliceOrIndex` + fn count(self) -> Option { + use SliceOrIndex::{Index, SliceCount, SliceTo, Unlimited}; + match self { + Index(_) => Some(1), + SliceTo { start, step, end, block } => { + Some((start + block.saturating_sub(1)..end).step_by(step).count()) + } + SliceCount { count, .. } => Some(count), + Unlimited { .. 
} => None, + } + } +} + +impl TryFrom for SliceOrIndex { + type Error = Error; + fn try_from(slice: ndarray::SliceInfoElem) -> Result { + Ok(match slice { + ndarray::SliceInfoElem::Index(index) => match Ix::try_from(index) { + Err(_) => fail!("Index must be non-negative"), + Ok(index) => Self::Index(index), + }, + ndarray::SliceInfoElem::Slice { start, end, step } => { + let start = + Ix::try_from(start).map_err(|_| Error::from("Index must be non-negative"))?; + let step = + Ix::try_from(step).map_err(|_| Error::from("Step must be non-negative"))?; + let end = end.map(|end| { + Ix::try_from(end).map_err(|_| Error::from("End must be non-negative")) + }); + match end { + Some(Ok(end)) => Self::SliceTo { start, step, end, block: 1 }, + None => Self::Unlimited { start, step, block: 1 }, + Some(Err(e)) => return Err(e), + } + } + ndarray::SliceInfoElem::NewAxis => fail!("ndarray NewAxis can not be mapped to hdf5"), + }) + } +} + +impl TryFrom for Hyperslab { + type Error = Error; + fn try_from(slice: ndarray::SliceInfoElem) -> Result { + Ok(vec![slice.try_into()?].into()) + } +} + +impl TryFrom for Selection { + type Error = Error; + fn try_from(slice: ndarray::SliceInfoElem) -> Result { + Hyperslab::try_from(slice).map(Into::into) + } +} + +impl From for SliceOrIndex { + fn from(_r: RangeFull) -> Self { + Self::Unlimited { start: 0, step: 1, block: 1 } + } +} + +impl TryFrom for SliceOrIndex { + type Error = std::num::TryFromIntError; + fn try_from(slice: ndarray::Slice) -> Result { + let ndarray::Slice { start, end, step } = slice; + let start = start.try_into()?; + let step = step.try_into()?; + let end = end.map(TryInto::try_into); + match end { + Some(Ok(end)) => Ok(Self::SliceTo { start, end, step, block: 1 }), + None => Ok(Self::Unlimited { start, step, block: 1 }), + Some(Err(e)) => Err(e), + } + } +} + +impl From for SliceOrIndex { + fn from(val: usize) -> Self { + Self::Index(val as _) + } +} + +impl From for Hyperslab { + fn from(slice: usize) -> Self { + 
(slice,).into() + } +} + +impl From for Selection { + fn from(slice: usize) -> Self { + Hyperslab::from(slice).into() + } +} + +impl From> for SliceOrIndex { + fn from(val: Range) -> Self { + Self::SliceTo { start: val.start as _, step: 1, end: val.end, block: 1 } + } +} + +impl From> for Hyperslab { + fn from(val: Range) -> Self { + vec![val.into()].into() + } +} + +impl From> for Selection { + fn from(val: Range) -> Self { + Hyperslab::from(val).into() + } +} + +impl From> for SliceOrIndex { + fn from(val: RangeToInclusive) -> Self { + let end = val.end + 1; + Self::SliceTo { start: 0, step: 1, end, block: 1 } + } +} + +impl From> for Hyperslab { + fn from(val: RangeToInclusive) -> Self { + vec![val.into()].into() + } +} + +impl From> for Selection { + fn from(val: RangeToInclusive) -> Self { + Hyperslab::from(val).into() + } +} + +impl From> for SliceOrIndex { + fn from(val: RangeFrom) -> Self { + Self::Unlimited { start: val.start, step: 1, block: 1 } + } +} + +impl From> for Hyperslab { + fn from(val: RangeFrom) -> Self { + vec![val.into()].into() + } +} + +impl From> for Selection { + fn from(val: RangeFrom) -> Self { + Hyperslab::from(val).into() + } +} + +impl From> for SliceOrIndex { + fn from(val: RangeInclusive) -> Self { + Self::SliceTo { start: *val.start(), step: 1, end: *val.end() + 1, block: 1 } + } +} + +impl From> for Hyperslab { + fn from(val: RangeInclusive) -> Self { + vec![val.into()].into() + } +} + +impl From> for Selection { + fn from(val: RangeInclusive) -> Self { + Hyperslab::from(val).into() + } +} + +impl From> for SliceOrIndex { + fn from(val: RangeTo) -> Self { + Self::SliceTo { start: 0, step: 1, end: val.end, block: 1 } + } +} + +impl From> for Hyperslab { + fn from(val: RangeTo) -> Self { + vec![val.into()].into() + } +} + +impl From> for Selection { + fn from(val: RangeTo) -> Self { + Hyperslab::from(val).into() + } +} + +impl Display for SliceOrIndex { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + 
Self::Index(index) => write!(f, "{}", index)?, + Self::SliceTo { start, end, step, block } => { + if start != 0 { + write!(f, "{}", start)?; + } + write!(f, "..")?; + write!(f, "{}", end)?; + if step != 1 { + write!(f, ";{}", step)?; + } + if block != 1 { + write!(f, "(Bx{})", block)?; + } + } + Self::SliceCount { start, step, count, block } => { + if start != 0 { + write!(f, "{}", start)?; + } + write!(f, "+{}", count)?; + if step != 1 { + write!(f, ";{}", step)?; + } + if block != 1 { + write!(f, "(Bx{})", block)?; + } + } + Self::Unlimited { start, step, block } => { + if start != 0 { + write!(f, "{}", start)?; + } + // \u{221e} = ∞ + write!(f, "..\u{221e}")?; + if step != 1 { + write!(f, ";{}", step)?; + } + if block != 1 { + write!(f, "(Bx{})", block)?; + } + } + } + Ok(()) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +/// A descriptor of a selection of an N-dimensional array. +/// +/// The Hyperslab consists of [`slices`](SliceOrIndex) in N dimensions, +/// spanning an N-dimensional hypercube. This type is used as a [`selector`](Selection) +/// for retrieving and putting data to a [`Container`](Container). 
+pub struct Hyperslab { + dims: Vec, +} + +impl Hyperslab { + pub fn new>(hyper: T) -> Self { + hyper.into() + } + + pub fn try_new>(hyper: T) -> Result { + hyper.try_into() + } + + pub fn is_unlimited(&self) -> bool { + self.iter().any(|&s| s.is_unlimited()) + } + + pub fn unlimited_axis(&self) -> Option { + self.iter().enumerate().find_map(|(i, s)| if s.is_unlimited() { Some(i) } else { None }) + } + + pub fn set_unlimited(&self, axis: usize) -> Result { + if axis < self.len() { + let mut hyper = self.clone(); + hyper.dims[axis] = hyper.dims[axis].to_unlimited()?; + Ok(hyper) + } else { + fail!("Invalid axis for making hyperslab unlimited: {}", axis); + } + } + + pub fn set_block(&self, axis: usize, blocksize: Ix) -> Result { + ensure!(axis < self.len(), "Invalid axis for changing the slice to block-like: {}", axis); + let mut hyper = self.clone(); + hyper.dims[axis] = hyper.dims[axis].set_blocksize(blocksize)?; + Ok(hyper) + } + + #[doc(hidden)] + pub fn into_raw>(self, shape: S) -> Result { + let shape = shape.as_ref(); + let ndim = shape.len(); + ensure!(self.len() == ndim, "Slice ndim ({}) != shape ndim ({})", self.len(), ndim); + //let n_unlimited: usize = self.iter().map(|s| s.is_unlimited() as usize).sum(); + //ensure!(n_unlimited <= 1, "Expected at most 1 unlimited dimension, got {}", n_unlimited); + let hyper = RawHyperslab::from( + self.iter() + .zip(shape) + .enumerate() + .map(|(i, (slice, &dim))| slice_info_to_raw(i, slice, dim)) + .collect::>>()?, + ); + Ok(hyper) + } + + #[doc(hidden)] + #[allow(clippy::needless_pass_by_value)] + pub fn from_raw(hyper: RawHyperslab) -> Result { + let mut dims = vec![]; + for (axis, slice) in hyper.iter().enumerate() { + ensure!(slice.step >= slice.block, "Blocks can not overlap (axis: {})", axis); + dims.push(match slice.count { + Some(count) => SliceOrIndex::SliceCount { + start: slice.start, + step: slice.step, + count, + block: slice.block, + }, + None => SliceOrIndex::Unlimited { + start: slice.start, + step: 
slice.step, + block: slice.block, + }, + }); + } + Ok(Self { dims }) + } +} + +impl Deref for Hyperslab { + type Target = [SliceOrIndex]; + + fn deref(&self) -> &Self::Target { + &self.dims + } +} + +impl From> for Hyperslab { + fn from(dims: Vec) -> Self { + Self { dims } + } +} + +impl From<()> for Hyperslab { + fn from(_: ()) -> Self { + vec![].into() + } +} + +impl From for Hyperslab { + fn from(_: RangeFull) -> Self { + (0..).into() + } +} + +impl From for Hyperslab { + fn from(slice: SliceOrIndex) -> Self { + vec![slice].into() + } +} + +impl TryFrom for Hyperslab { + type Error = Error; + fn try_from(slice: ndarray::Slice) -> Result { + Ok(vec![SliceOrIndex::try_from(slice).map_err(|_| Error::from("Invalid slice"))?].into()) + } +} + +impl TryFrom> for Hyperslab +where + T: AsRef<[ndarray::SliceInfoElem]>, + Din: ndarray::Dimension, + Dout: ndarray::Dimension, +{ + type Error = Error; + fn try_from(slice: ndarray::SliceInfo) -> Result { + slice + .deref() + .as_ref() + .iter() + .copied() + .map(TryInto::try_into) + .collect::>>() + .map(Into::into) + } +} + +/// Turns `SliceOrIndex` into real dimensions given `dim` as the maximum dimension +fn slice_info_to_raw(axis: usize, slice: &SliceOrIndex, dim: Ix) -> Result { + let err_msg = || format!("out of bounds for axis {} with size {}", axis, dim); + let (start, step, count, block) = match *slice { + SliceOrIndex::Index(index) => { + ensure!(index < dim, "Index {} {}", index, err_msg()); + (index, 1, 1, 1) + } + SliceOrIndex::SliceTo { start, step, end, block } => { + ensure!(step >= 1, "Slice stride {} < 1 for axis {}", step, axis); + ensure!(start <= dim, "Slice start {} {}", start, err_msg()); + ensure!(end <= dim, "Slice end {} {}", end, err_msg()); + ensure!(step > 0, "Stride {} {}", step, err_msg()); + let count = slice.count().unwrap(); + (start, step, count, block) + } + SliceOrIndex::SliceCount { start, step, count, block } => { + ensure!(step >= 1, "Slice stride {} < 1 for axis {}", step, axis); + 
ensure!(start <= dim as _, "Slice start {} {}", start, err_msg()); + let end = start + block.saturating_sub(1) + step * count.saturating_sub(1); + ensure!(end <= dim, "Slice end {} {}", end, err_msg()); + (start, step, count, block) + } + SliceOrIndex::Unlimited { start, step, block } => { + // Replace infinite slice with one limited by the current dimension + return slice_info_to_raw( + axis, + &SliceOrIndex::SliceTo { start, step, end: dim, block }, + dim, + ); + } + }; + Ok(RawSlice { start, step, count: Some(count), block }) +} + +impl Display for Hyperslab { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let slice: &[_] = self.as_ref(); + write!(f, "(")?; + for (i, s) in slice.iter().enumerate() { + if i != 0 { + write!(f, ", ")?; + } + write!(f, "{}", s)?; + } + if slice.len() == 1 { + write!(f, ",")?; + } + write!(f, ")") + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +/// A selection used for reading and writing to a [`Container`](Container). +pub enum Selection { + All, + Points(Array2), + Hyperslab(Hyperslab), +} + +impl Default for Selection { + fn default() -> Self { + Self::All + } +} + +impl Selection { + pub fn new>(selection: T) -> Self { + selection.into() + } + + pub fn try_new>(selection: T) -> Result { + selection.try_into() + } + + #[doc(hidden)] + pub fn into_raw>(self, shape: S) -> Result { + let shape = shape.as_ref(); + Ok(match self { + Self::All => RawSelection::All, + Self::Points(coords) => { + check_coords(&coords, shape)?; + if coords.shape()[0] == 0 { + RawSelection::None + } else { + RawSelection::Points(coords) + } + } + Self::Hyperslab(hyper) => { + let hyper = hyper.into_raw(shape)?; + if hyper.is_none() { + RawSelection::None + } else if hyper.is_all(shape) { + RawSelection::All + } else { + RawSelection::RegularHyperslab(hyper) + } + } + }) + } + + #[doc(hidden)] + pub fn from_raw(selection: RawSelection) -> Result { + Ok(match selection { + RawSelection::None => Self::Points(Array2::default((0, 0))), + 
RawSelection::All => Self::All, + RawSelection::Points(coords) => Self::Points(coords), + RawSelection::RegularHyperslab(hyper) => Hyperslab::from_raw(hyper)?.into(), + RawSelection::ComplexHyperslab => fail!("Cannot convert complex hyperslabs"), + }) + } + + pub fn in_ndim(&self) -> Option { + match self { + Self::All => None, + Self::Points(ref points) => { + if points.shape() == [0, 0] { + None + } else { + Some(points.shape()[1]) + } + } + Self::Hyperslab(ref hyper) => Some(hyper.len()), + } + } + + pub fn out_ndim(&self) -> Option { + match self { + Self::All => None, + Self::Points(ref points) => Some(usize::from(points.shape() != [0, 0])), + Self::Hyperslab(ref hyper) => { + Some(hyper.iter().map(|&s| usize::from(s.is_slice())).sum()) + } + } + } + + pub fn out_shape>(&self, in_shape: S) -> Result> { + let in_shape = in_shape.as_ref(); + match self { + Self::All => Ok(in_shape.to_owned()), + Self::Points(ref points) => check_coords(points, in_shape) + .and(Ok(if points.shape() == [0, 0] { vec![] } else { vec![points.shape()[0]] })), + Self::Hyperslab(ref hyper) => hyper + .clone() + .into_raw(in_shape)? 
+ .iter() + .zip(hyper.iter()) + .filter_map(|(&r, &s)| match (r.count, s.is_index()) { + (Some(_), true) => None, + (Some(count), false) => Some(Ok(count * r.block)), + (None, _) => { + Some(Err("Unable to get the shape for unlimited hyperslab".into())) + } + }) + .collect(), + } + } + + pub fn is_all(&self) -> bool { + self == &Self::All + } + + pub fn is_points(&self) -> bool { + if let Self::Points(ref points) = self { + points.shape() != [0, 0] + } else { + false + } + } + + pub fn is_none(&self) -> bool { + if let Self::Points(points) = self { + points.shape() == [0, 0] + } else { + false + } + } + + pub fn is_hyperslab(&self) -> bool { + matches!(self, Self::Hyperslab(_)) + } +} + +impl Display for Selection { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Self::All => write!(f, ".."), + Self::Points(ref points) => { + if points.shape() == [0, 0] { + write!(f, "[]") + } else { + write!(f, "{}", points) + } + } + Self::Hyperslab(hyper) => write!(f, "{}", hyper), + } + } +} + +impl From<&Self> for Selection { + fn from(sel: &Self) -> Self { + sel.clone() + } +} + +impl From for Selection { + fn from(_: RangeFull) -> Self { + Self::All + } +} + +impl From<()> for Selection { + fn from(_: ()) -> Self { + Hyperslab::from(()).into() + } +} + +impl From for Selection { + fn from(slice: SliceOrIndex) -> Self { + Self::Hyperslab(slice.into()) + } +} + +impl From for Selection { + fn from(hyper: Hyperslab) -> Self { + Self::Hyperslab(hyper) + } +} + +impl TryFrom for Selection { + type Error = Error; + fn try_from(slice: ndarray::Slice) -> Result { + Hyperslab::try_from(slice).map(Into::into) + } +} + +impl TryFrom> for Selection +where + T: AsRef<[ndarray::SliceInfoElem]>, + Din: ndarray::Dimension, + Dout: ndarray::Dimension, +{ + type Error = Error; + fn try_from(slice: ndarray::SliceInfo) -> Result { + Hyperslab::try_from(slice).map(Into::into) + } +} + +impl From> for Selection { + fn from(points: Array2) -> Self { + 
Self::Points(points) + } +} + +impl From> for Selection { + fn from(points: Array1) -> Self { + let n = points.len(); + Self::Points(if n == 0 { + Array2::zeros((0, 0)) + } else { + points.insert_axis(ndarray::Axis(1)) + }) + } +} + +impl From> for Selection { + fn from(points: ArrayView2<'_, Ix>) -> Self { + points.to_owned().into() + } +} + +impl From> for Selection { + fn from(points: ArrayView1<'_, Ix>) -> Self { + points.to_owned().into() + } +} + +impl From<&Array2> for Selection { + fn from(points: &Array2) -> Self { + points.clone().into() + } +} + +impl From<&Array1> for Selection { + fn from(points: &Array1) -> Self { + points.clone().into() + } +} + +impl From> for Selection { + fn from(points: Vec) -> Self { + Array1::from(points).into() + } +} + +impl From<&[Ix]> for Selection { + fn from(points: &[Ix]) -> Self { + ArrayView1::from(points).into() + } +} + +impl From<[Ix; N]> for Selection { + fn from(points: [Ix; N]) -> Self { + points.as_ref().into() + } +} + +impl From<&[Ix; N]> for Selection { + fn from(points: &[Ix; N]) -> Self { + points.as_ref().into() + } +} + +macro_rules! impl_tuple { + () => (); + + ($head:ident, $($tail:ident,)*) => ( + #[allow(non_snake_case)] + impl<$head, $($tail,)*> From<($head, $($tail,)*)> for Hyperslab + where $head: Into, $($tail: Into,)* + { + fn from(slice: ($head, $($tail,)*)) -> Self { + let ($head, $($tail,)*) = slice; + vec![($head).into(), $(($tail).into(),)*].into() + } + } + + #[allow(non_snake_case)] + impl<$head, $($tail,)*> From<($head, $($tail,)*)> for Selection + where $head: Into, $($tail: Into,)* + { + fn from(slice: ($head, $($tail,)*)) -> Self { + Hyperslab::from(slice).into() + } + } + + impl_tuple! { $($tail,)* } + ) +} + +impl_tuple! 
{ T0, T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, } + +#[cfg(test)] +mod test { + use ndarray::{arr1, arr2, s, Array2}; + use pretty_assertions::assert_eq; + + use super::{ + Hyperslab, RawHyperslab, RawSelection, RawSlice, Selection, SliceOrIndex, SliceOrIndex::*, + }; + use crate::internal_prelude::*; + + #[test] + fn count() { + use SliceOrIndex::*; + assert_eq!(Unlimited { start: 0, step: 1, block: 1 }.count(), None); + assert_eq!(Index(23412).count(), Some(1)); + assert_eq!(SliceCount { start: 0, step: 1431, count: 41, block: 4 }.count(), Some(41)); + assert_eq!(SliceTo { start: 0, step: 1, end: 1, block: 1 }.count(), Some(1)); + assert_eq!(SliceTo { start: 0, step: 10, end: 1, block: 1 }.count(), Some(1)); + assert_eq!(SliceTo { start: 0, step: 1, end: 10, block: 1 }.count(), Some(10)); + assert_eq!(SliceTo { start: 0, step: 10, end: 10, block: 1 }.count(), Some(1)); + assert_eq!(SliceTo { start: 0, step: 9, end: 10, block: 1 }.count(), Some(2)); + assert_eq!(SliceTo { start: 0, step: 9, end: 10, block: 2 }.count(), Some(1)); + assert_eq!(SliceTo { start: 1, step: 3, end: 6, block: 1 }.count(), Some(2)); + } + + #[test] + fn test_slice_or_index_impl() -> Result<()> { + use std::convert::TryFrom; + let s = SliceOrIndex::from(2); + assert_eq!(s, Index(2)); + assert!(s.is_index()); + assert!(!s.is_slice()); + assert!(!s.is_unlimited()); + assert_err!(s.to_unlimited(), "Cannot make index selection unlimited"); + assert_err!(s.set_blocksize(2), "Cannot set blocksize for index selection"); + + let s = SliceOrIndex::from(2..=5); + assert_eq!(s, SliceTo { start: 2, step: 1, end: 6, block: 1 }); + assert!(!s.is_index()); + assert!(s.is_slice()); + assert!(!s.is_unlimited()); + assert_eq!(s.to_unlimited().unwrap(), Unlimited { start: 2, step: 1, block: 1 }); + assert_eq!(s.set_blocksize(4)?, SliceTo { start: 2, step: 1, end: 6, block: 4 }); + assert_eq!( + SliceOrIndex::try_from(*s![1..2;3].get(0).unwrap()).unwrap(), + SliceTo { start: 1, step: 3, end: 2, block: 1 } 
+ ); + + let s = SliceOrIndex::from(3..).to_unlimited()?; + assert_eq!(s, Unlimited { start: 3, step: 1, block: 1 }); + assert!(!s.is_index()); + assert!(s.is_slice()); + assert!(s.is_unlimited()); + assert_eq!(s.to_unlimited()?, s); + + for (s, f) in &[ + (Unlimited { start: 0, step: 1, block: 1 }, "..∞"), + (Unlimited { start: 0, step: 1, block: 2 }, "..∞(Bx2)"), + (SliceTo { start: 0, step: 1, end: 5, block: 1 }, "..5"), + (SliceTo { start: 0, step: 3, end: 5, block: 2 }, "..5;3(Bx2)"), + (Unlimited { start: 0, step: 1, block: 1 }, "..∞"), + (Unlimited { start: 0, step: 3, block: 2 }, "..∞;3(Bx2)"), + (SliceCount { start: 1, step: 3, count: 5, block: 1 }, "1+5;3"), + (SliceCount { start: 0, step: 3, count: 5, block: 2 }, "+5;3(Bx2)"), + (SliceCount { start: 1, step: 3, count: 5, block: 2 }, "1+5;3(Bx2)"), + ] { + assert_eq!(&format!("{}", s), f); + } + + Ok(()) + } + + #[test] + fn test_selection_hyperslab_new() { + macro_rules! check { + ($hs1:expr, $hs2:expr) => { + assert_eq!(Hyperslab::try_new($hs1).unwrap().as_ref().to_owned(), $hs2); + let s = Selection::try_new($hs1).unwrap(); + assert_eq!(s, Selection::new(Hyperslab::new($hs2))); + assert_eq!(s, Selection::Hyperslab(Hyperslab::new($hs2))); + }; + } + + check!((), vec![]); + check!(Index(2), vec![Index(2)]); + check!(ndarray::SliceInfoElem::Index(10), vec![Index(10)]); + check!(3.., vec![Unlimited { start: 3, step: 1, block: 1 }]); + + assert_eq!( + Hyperslab::new(..).as_ref().to_owned(), + vec![Unlimited { start: 0, step: 1, block: 1 }] + ); + assert_eq!(Selection::new(..), Selection::All); + + use std::convert::TryFrom; + assert!(Selection::try_from(s![-1, 2..;3, ..4]).is_err()); + assert!(Selection::try_from(ndarray::Slice::new(-1, None, 2)).is_err()); + } + + #[test] + fn test_selection_points_new() { + macro_rules! 
check { + ($e:expr, $p:expr) => { + let s = Selection::from($e); + assert_eq!(s, Selection::Points($p.clone())); + }; + } + + let a2 = arr2(&[[1, 2], [3, 4]]); + check!(a2.clone(), &a2); + check!(&a2, &a2); + check!(a2.view(), &a2); + let a1 = arr1(&[1, 2, 3]); + let a1_2 = arr2(&[[1], [2], [3]]); + check!(a1.clone(), &a1_2); + check!(&a1, &a1_2); + check!(a1.view(), &a1_2); + check!(a1.as_slice().unwrap(), &a1_2); + check!(a1.to_vec(), &a1_2); + check!([1, 2, 3], &a1_2); + check!(&[1, 2, 3], &a1_2); + + let s = Selection::new(&[]); + assert_eq!(s, Selection::Points(Array2::zeros((0, 0)))); + } + + #[test] + fn test_hyperslab_impl() -> Result<()> { + let h = Hyperslab::try_new(s![0, 1..10, 2..;3])?; + assert_eq!( + h.as_ref().to_owned(), + vec![ + Index(0), + SliceTo { start: 1, step: 1, end: 10, block: 1 }, + Unlimited { start: 2, step: 3, block: 1 }, + ] + ); + assert!(h.is_unlimited()); + assert_eq!(h.unlimited_axis(), Some(2)); + + assert_err!(h.set_unlimited(0), "Cannot make index selection unlimited"); + h.set_unlimited(1)?; + assert_err!(h.set_unlimited(3), "Invalid axis for making hyperslab unlimited: 3"); + let u = h.set_unlimited(2)?; + assert!(u.is_unlimited()); + assert_eq!(u.unlimited_axis(), Some(2)); + assert_eq!( + u.as_ref().to_owned(), + vec![ + Index(0), + SliceTo { start: 1, step: 1, end: 10, block: 1 }, + Unlimited { start: 2, step: 3, block: 1 }, + ] + ); + u.set_unlimited(1).unwrap(); + assert_eq!(u.set_unlimited(2)?, u); + + assert_err!(u.set_block(0, 1), "Cannot set blocksize for index selection"); + assert_err!(u.set_block(3, 1), "Invalid axis for changing the slice to block-like: 3"); + let b = u.set_block(1, 2)?; + assert_eq!( + b.as_ref().to_owned(), + vec![ + Index(0), + SliceTo { start: 1, step: 1, end: 10, block: 2 }, + Unlimited { start: 2, step: 3, block: 1 }, + ] + ); + let b = b.set_block(2, 2)?; + assert_eq!( + b.as_ref().to_owned(), + vec![ + Index(0), + SliceTo { start: 1, step: 1, end: 10, block: 2 }, + Unlimited { start: 2, 
step: 3, block: 2 }, + ] + ); + assert_eq!(b.set_block(1, 2)?.set_block(2, 2)?, b); + + Ok(()) + } + + #[test] + fn test_selection_default() { + assert!(Selection::default().is_all()); + } + + #[test] + fn test_selection_all_impl() { + let s = Selection::new(..); + assert_eq!(s, s); + assert!(s.is_all() && !s.is_hyperslab() && !s.is_points() && !s.is_none()); + assert_ne!(s, Selection::new(())); + assert_ne!(s, Selection::new(&[])); + assert_eq!(s.in_ndim(), None); + assert_eq!(s.out_ndim(), None); + assert_eq!(s.out_shape(&[1, 2, 3]).unwrap(), &[1, 2, 3]); + assert_eq!(format!("{}", s), ".."); + } + + #[test] + fn test_selection_points_impl() { + let s = Selection::new(arr2(&[[1, 2, 3], [4, 5, 6]])); + assert_eq!(s, s); + assert!(!s.is_all() && !s.is_hyperslab() && s.is_points() && !s.is_none()); + assert_ne!(s, Selection::new(())); + assert_ne!(s, Selection::new(..)); + assert_eq!(s.in_ndim(), Some(3)); + assert_eq!(s.out_ndim(), Some(1)); + assert_eq!(s.out_shape(&[5, 10, 15]).unwrap(), &[2]); + assert_eq!(format!("{}", s), "[[1, 2, 3],\n [4, 5, 6]]"); + } + + #[test] + fn test_selection_none_impl() { + let s = Selection::new(&[]); + assert_eq!(s, s); + assert!(!s.is_all() && !s.is_hyperslab() && !s.is_points() && s.is_none()); + assert_eq!(s.in_ndim(), None); + assert_eq!(s.out_shape(&[1, 2, 3]).unwrap(), &[]); + assert_eq!(format!("{}", s), "[]"); + } + + #[test] + fn test_selection_hyperslab_impl() { + let s = Selection::try_new(s![1, 2..;2]).unwrap(); + assert_eq!(s, s); + assert!(!s.is_all() && s.is_hyperslab() && !s.is_points() && !s.is_none()); + assert_ne!(s, Selection::new(..)); + assert_ne!(s, Selection::new(&[])); + assert_eq!(s.in_ndim(), Some(2)); + assert_eq!(s.out_ndim(), Some(1)); + assert_eq!(s.out_shape(&[10, 20]).unwrap(), &[9]); + assert_eq!(format!("{}", Selection::try_new(s![1]).unwrap()), "(1,)"); + assert_eq!(format!("{}", Selection::new(())), "()"); + + let h = Hyperslab::try_new(s![1, 2..;3, ..4, 
5..]).unwrap().set_unlimited(1).unwrap(); + assert_eq!(format!("{}", h), "(1, 2..∞;3, ..4, 5..∞)"); + let s = Selection::new(h); + assert_eq!(format!("{}", s), "(1, 2..∞;3, ..4, 5..∞)"); + assert_eq!(s.out_shape(&[2, 3, 4, 5]).unwrap(), &[1, 4, 0]); + } + + #[test] + fn test_hyperslab_into_from_raw_err() { + use std::convert::TryInto; + #[track_caller] + fn check, S: AsRef<[Ix]>>(hyper: H, shape: S, err: &str) + where + H::Error: std::fmt::Debug, + { + let hyper = hyper.try_into().unwrap(); + assert_err!(hyper.into_raw(shape.as_ref()), err); + } + + check(s![1, 2], &[1, 2, 3], "Slice ndim (2) != shape ndim (3)"); + assert!(Hyperslab::try_new(s![0, ..;-1]).is_err()); + check(s![0, 0], &[0, 1], "Index 0 out of bounds for axis 0 with size 0"); + check(s![.., 1], &[0, 1], "Index 1 out of bounds for axis 1 with size 1"); + assert!(Hyperslab::try_new(s![-3]).is_err()); + check(s![2], &[2], "Index 2 out of bounds for axis 0 with size 2"); + + check(s![0, 3..], &[1, 2], "Slice start 3 out of bounds for axis 1 with size 2"); + assert!(Hyperslab::try_new(s![-2..;2, 0]).is_err()); + check(s![0, ..=3], &[1, 2], "Slice end 4 out of bounds for axis 1 with size 2"); + assert!(Hyperslab::try_new(s![-2..;2, 0]).is_err()); + + check( + (0, Unlimited { start: 3, step: 1, block: 1 }), + &[1, 2], + "Slice start 3 out of bounds for axis 1 with size 2", + ); + + assert_err!( + Hyperslab::from_raw(vec![RawSlice::new(0, 2, Some(1), 3)].into()), + "Blocks can not overlap (axis: 0)" + ); + } + + #[test] + fn test_points_into_raw_err() { + assert_err!( + Selection::new(arr2(&[[1, 2], [3, 5]])).out_shape(&[4, 3]), + "Index 5 out of bounds for axis 1 with size 3" + ); + } + + #[test] + fn test_hyperslab_into_from_raw() -> Result<()> { + use std::convert::TryInto; + fn check( + shape: S, hyper: H, exp_raw_hyper: RH, exp_raw_sel: Option, exp_hyper2: H2, + exp_sel2: Option, + ) where + S: AsRef<[Ix]>, + H: TryInto, + H::Error: std::fmt::Debug, + RH: Into, + RS: Into, + H2: TryInto, + H2::Error: 
std::fmt::Debug, + S2: TryInto, + S2::Error: std::fmt::Debug, + { + let shape = shape.as_ref(); + let hyper = hyper.try_into().unwrap(); + let exp_raw_hyper = exp_raw_hyper.into(); + let exp_raw_sel = exp_raw_sel.map(Into::into).unwrap_or(exp_raw_hyper.clone().into()); + let exp_hyper2 = exp_hyper2.try_into().unwrap(); + let exp_sel2 = exp_sel2 + .map(|x| TryInto::try_into(x).unwrap()) + .unwrap_or(exp_hyper2.clone().try_into().unwrap()); + + let raw_hyper = hyper.clone().into_raw(shape).unwrap(); + assert_eq!(raw_hyper, exp_raw_hyper); + + let sel = Selection::new(hyper.clone()); + let raw_sel = sel.clone().into_raw(shape).unwrap(); + assert_eq!(raw_sel, exp_raw_sel); + + let hyper2 = Hyperslab::from_raw(raw_hyper.clone()).unwrap(); + assert_eq!(hyper2, exp_hyper2); + + let sel2 = Selection::from_raw(raw_sel.clone()).unwrap(); + assert_eq!(sel2, exp_sel2); + + let raw_hyper2 = hyper2.clone().into_raw(shape).unwrap(); + assert_eq!(raw_hyper2, raw_hyper); + + let raw_sel2 = sel2.clone().into_raw(shape).unwrap(); + assert_eq!(raw_sel2, raw_sel); + } + + check(&[], (), vec![], Some(RawSelection::All), (), Some(Selection::All)); + + check( + &[5, 5, 5], + s![.., 0..5, ..=4], + vec![RawSlice::new(0, 1, Some(5), 1); 3], + Some(RawSelection::All), + s![..5, ..5, ..5], + Some(Selection::All), + ); + + check( + &[0; 6], + s![.., 0.., ..0, 0..0, ..;1, ..;2], + vec![ + RawSlice::new(0, 1, Some(0), 1), + RawSlice::new(0, 1, Some(0), 1), + RawSlice::new(0, 1, Some(0), 1), + RawSlice::new(0, 1, Some(0), 1), + RawSlice::new(0, 1, Some(0), 1), + RawSlice::new(0, 2, Some(0), 1), + ], + Some(RawSelection::None), + s![..0, ..0, ..0, ..0, ..0, ..0;2], + Some(Selection::new(&[])), + ); + + assert!(Hyperslab::try_new( + s![.., ..;2, 1.., 1..;2, -3..=1, -3..=-1;2, ..=-1, ..=-1;3, 0..-1, 2..=-1] + ) + .is_err()); + assert!(Hyperslab::try_new( + s![.., ..;2, 1.., 1..;2, -3..=1, -3..=-1;2, ..=-1, ..=-1;3, 0..-1, 2..=-1] + ) + .is_err()); + assert!(Hyperslab::try_new(s![-5.., -10, 1..-1;2, 
1],).is_err()); + assert!(Hyperslab::try_new(s![5..10, 0..1, 1..8;2, 1..2],).is_ok()); + + check( + &[7; 7], + Hyperslab::try_new(s![1..2;3, 1..3;3, 1..4;3, 1..5;3, 1..6;3, 1..7;3, ..7;3])?, + vec![ + RawSlice::new(1, 3, Some(1), 1), + RawSlice::new(1, 3, Some(1), 1), + RawSlice::new(1, 3, Some(1), 1), + RawSlice::new(1, 3, Some(2), 1), + RawSlice::new(1, 3, Some(2), 1), + RawSlice::new(1, 3, Some(2), 1), + RawSlice::new(0, 3, Some(3), 1), + ], + None as Option, + Hyperslab::try_new(s![1..2;3, 1..2;3, 1..2;3, 1..5;3, 1..5;3, 1..5;3, ..7;3])?, + None as Option, + ); + + Ok(()) + } + + #[test] + fn test_in_out_shape_ndim() -> Result<()> { + use std::convert::TryInto; + fn check, E: AsRef<[Ix]>>( + sel: S, exp_in_ndim: Option, exp_out_shape: E, exp_out_ndim: Option, + ) -> Result<()> + where + S::Error: std::fmt::Debug, + { + let in_shape = [7, 8]; + let sel = sel.try_into().unwrap(); + assert_eq!(sel.in_ndim(), exp_in_ndim); + let out_shape = sel.out_shape(&in_shape)?; + let out_ndim = sel.out_ndim(); + assert_eq!(out_shape.as_slice(), exp_out_shape.as_ref()); + assert_eq!(out_ndim, exp_out_ndim); + if let Some(out_ndim) = out_ndim { + assert_eq!(out_shape.len(), out_ndim); + } else { + assert_eq!(out_shape.len(), in_shape.len()); + } + Ok(()) + } + + check(.., None, [7, 8], None)?; + check(Array2::zeros((0, 0)), None, [], Some(0))?; + check(arr2(&[[0, 1]]), Some(2), [1], Some(1))?; + check(arr2(&[[0, 1], [2, 3], [4, 5]]), Some(2), [3], Some(1))?; + check(s![1, 2], Some(2), [], Some(0))?; + check(s![1, 2..;2], Some(2), [3], Some(1))?; + check(s![1..;3, 2], Some(2), [2], Some(1))?; + check(s![1..;3, 2..;2], Some(2), [2, 3], Some(2))?; + check(Hyperslab::try_new(s![1, 2..;2])?.set_block(1, 6)?, Some(2), [6], Some(1))?; + check(Hyperslab::try_new(s![1..;3, 2])?.set_block(0, 6)?, Some(2), [6], Some(1))?; + check( + Hyperslab::try_new(s![1..;3, 2..;2])?.set_block(0, 6)?.set_block(1, 6)?, + Some(2), + [6, 6], + Some(2), + )?; + + assert_err!( + check(arr2(&[[1, 2, 3]]), 
Some(3), [], None), + "Slice ndim (3) != shape ndim (2)" + ); + assert_err!( + check(arr2(&[[7, 1]]), Some(2), [], None), + "Index 7 out of bounds for axis 0 with size 7" + ); + + Ok(()) + } + + #[test] + fn test_selection_into_from_raw() -> Result<()> { + use std::convert::TryInto; + fn check( + shape: Sh, sel: S, exp_raw_sel: RS, exp_sel2: Option, + ) -> Result<()> + where + Sh: AsRef<[Ix]>, + S: TryInto, + S::Error: std::fmt::Debug, + RS: Into, + S2: TryInto, + S2::Error: std::fmt::Debug, + { + let shape = shape.as_ref(); + let sel = sel.try_into().unwrap(); + let exp_raw_sel = exp_raw_sel.into(); + let exp_sel2 = exp_sel2.map_or(sel.clone(), |x| x.try_into().unwrap()); + + let raw_sel = sel.clone().into_raw(shape)?; + assert_eq!(raw_sel, exp_raw_sel); + + let sel2 = Selection::from_raw(raw_sel.clone())?; + assert_eq!(sel2, exp_sel2); + + let raw_sel2 = sel2.clone().into_raw(shape)?; + assert_eq!(raw_sel2, raw_sel); + + Ok(()) + } + + check(&[5, 6], .., RawSelection::All, None as Option)?; + check(&[5, 6], Array2::zeros((0, 0)), RawSelection::None, None as Option)?; + check(&[5, 6], Array2::zeros((0, 2)), RawSelection::None, Some(Array2::zeros((0, 0))))?; + check( + &[5, 6], + arr2(&[[1, 2]]), + RawSelection::Points(arr2(&[[1, 2]])), + None as Option, + )?; + check(&[5, 6], s![1..1;2, 3], RawSelection::None, Some(&[]))?; + check( + &[5, 6], + s![1..;2, 3], + vec![RawSlice::new(1, 2, Some(2), 1), RawSlice::new(3, 1, Some(1), 1)], + Some(s![1..4;2, 3..4]), + )?; + + assert_err!( + Selection::from_raw(RawSelection::ComplexHyperslab), + "Cannot convert complex hyperslabs" + ); + + Ok(()) + } + + #[test] + fn test_apply_extract_selection() -> Result<()> { + use crate::sync::sync; + use hdf5_sys::h5s::{H5Sclose, H5Screate_simple}; + use std::ptr; + + fn check( + shape: Sh, raw_sel: RawSelection, exp_raw_sel2: Option, + ) -> Result<()> + where + Sh: AsRef<[Ix]>, + { + let shape = shape.as_ref(); + let c_shape = shape.iter().map(|&x| x as _).collect::>(); + let 
exp_raw_sel2 = exp_raw_sel2.unwrap_or(raw_sel.clone()); + sync(|| unsafe { + let space_id = + h5check(H5Screate_simple(shape.len() as _, c_shape.as_ptr(), ptr::null_mut()))?; + raw_sel.apply_to_dataspace(space_id)?; + let raw_sel2 = RawSelection::extract_from_dataspace(space_id)?; + assert_eq!(raw_sel2, exp_raw_sel2); + H5Sclose(space_id); + Ok(()) + }) + } + + check(&[1, 2], RawSelection::None, None)?; + check(&[1, 2], RawSelection::All, None)?; + check(&[1, 2], RawSelection::Points(arr2(&[[0, 1], [0, 0]])), None)?; + + let exp = + if cfg!(feature = "1.10.0") { None } else { Some(RawSelection::ComplexHyperslab) }; + check( + &[8, 9, 10, 11], + vec![ + RawSlice::new(1, 2, None, 2), + RawSlice::new(1, 2, Some(2), 2), + RawSlice::new(1, 1, Some(3), 1), + RawSlice::new(1, 2, Some(4), 1), + ] + .into(), + exp, + )?; + + assert_err!( + check(&[1, 2], RawSelection::ComplexHyperslab, None), + "Complex hyperslabs are not supported" + ); + assert_err!( + check(&[1, 2], RawSelection::Points(Array2::zeros((0, 2))), None), + "H5Sselect_elements(): elements not specified" + ); + + Ok(()) + } + + #[test] + fn use_selection_on_dataset() { + with_tmp_file(|file| { + let ds = file.new_dataset::().shape((5, 5)).create("ds_fixed").unwrap(); + assert_eq!(&ds.shape(), &[5, 5]); + let ds = file.new_dataset::().shape((0.., 0..)).create("ds_twounlim").unwrap(); + assert_eq!(&ds.shape(), &[0, 0]); + ds.resize((5, 5)).unwrap(); + assert_eq!(&ds.shape(), &[5, 5]); + let ds = file.new_dataset::().shape((5, 0..)).create("ds_oneunlim0").unwrap(); + assert_eq!(&ds.shape(), &[5, 0]); + ds.resize((5, 5)).unwrap(); + assert_eq!(&ds.shape(), &[5, 5]); + let ds = file.new_dataset::().shape((0.., 5)).create("ds_oneunlim1").unwrap(); + assert_eq!(&ds.shape(), &[0, 5]); + ds.resize((5, 5)).unwrap(); + assert_eq!(&ds.shape(), &[5, 5]); + }) + } +} diff --git a/src/hl/space.rs b/src/hl/space.rs deleted file mode 100644 index ee6b7bb62..000000000 --- a/src/hl/space.rs +++ /dev/null @@ -1,229 +0,0 @@ -use 
std::convert::AsRef; -use std::fmt::{self, Debug}; -use std::ops::Deref; -use std::ptr; - -use ndarray::SliceOrIndex; - -use hdf5_sys::h5s::{ - H5Scopy, H5Screate_simple, H5Sget_simple_extent_dims, H5Sget_simple_extent_ndims, - H5Sselect_hyperslab, H5S_SELECT_SET, -}; - -use crate::internal_prelude::*; - -/// Represents the HDF5 dataspace object. -#[repr(transparent)] -#[derive(Clone)] -pub struct Dataspace(Handle); - -impl ObjectClass for Dataspace { - const NAME: &'static str = "dataspace"; - const VALID_TYPES: &'static [H5I_type_t] = &[H5I_DATASPACE]; - - fn from_handle(handle: Handle) -> Self { - Self(handle) - } - - fn handle(&self) -> &Handle { - &self.0 - } - - fn short_repr(&self) -> Option { - if self.ndim() == 1 { - Some(format!("({},)", self.dims()[0])) - } else { - let dims = self.dims().iter().map(ToString::to_string).collect::>().join(", "); - Some(format!("({})", dims)) - } - } -} - -impl Debug for Dataspace { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.debug_fmt(f) - } -} - -impl Deref for Dataspace { - type Target = Object; - - fn deref(&self) -> &Object { - unsafe { self.transmute() } - } -} - -impl Dataspace { - /// Copies the dataspace. - pub fn copy(&self) -> Self { - Self::from_id(h5lock!(H5Scopy(self.id()))).unwrap_or_else(|_| Self::invalid()) - } - - /// Select a slice (known as a 'hyperslab' in HDF5 terminology) of the Dataspace. - /// Returns the shape of array that is capable of holding the resulting slice. - /// Useful when you want to read a subset of a dataset. 
- pub fn select_slice(&self, slice: S) -> Result> - where - S: AsRef<[SliceOrIndex]>, - { - let shape = self.dims(); - let ss: &[SliceOrIndex] = slice.as_ref(); - - let mut start_vec = Vec::with_capacity(ss.len()); - let mut stride_vec = Vec::with_capacity(ss.len()); - let mut count_vec = Vec::with_capacity(ss.len()); - let mut shape_vec = Vec::with_capacity(ss.len()); - - for i in 0..ss.len() { - let (start, stride, count) = Self::get_start_stride_count(&ss[i], shape[i])?; - start_vec.push(start); - stride_vec.push(stride); - count_vec.push(count); - shape_vec.push(count as Ix); - } - - h5try!(H5Sselect_hyperslab( - self.id(), - H5S_SELECT_SET, - start_vec.as_ptr(), - stride_vec.as_ptr(), - count_vec.as_ptr(), - ptr::null() - )); - Ok(shape_vec) - } - - fn get_start_stride_count(v: &SliceOrIndex, len: Ix) -> Result<(u64, u64, u64)> { - match v { - SliceOrIndex::Slice { start, end, step } => { - let end = end.unwrap_or(len as isize); - ensure!(end <= len as _, "slice extends beyond dataspace bounds"); - ensure!(*step >= 1, "step must be >= 1 (got {})", step); - - if end < *start { - return Ok((0, 1, 0)); - } - - let count = if (end - start) <= 0 { 0 } else { 1 + (end - start - 1) / step }; - - Ok((*start as u64, *step as u64, count as u64)) - } - SliceOrIndex::Index(v) => Ok((*v as u64, 1, 1)), - } - } - - pub fn try_new(d: D, resizable: bool) -> Result { - let rank = d.ndim(); - let mut dims: Vec = vec![]; - let mut max_dims: Vec = vec![]; - for dim in &d.dims() { - dims.push(*dim as _); - max_dims.push(if resizable { H5S_UNLIMITED } else { *dim as _ }); - } - Self::from_id(h5try!(H5Screate_simple(rank as _, dims.as_ptr(), max_dims.as_ptr()))) - } - - pub fn maxdims(&self) -> Vec { - let ndim = self.ndim(); - if ndim > 0 { - let mut maxdims: Vec = Vec::with_capacity(ndim); - unsafe { - maxdims.set_len(ndim); - } - if h5call!(H5Sget_simple_extent_dims(self.id(), ptr::null_mut(), maxdims.as_mut_ptr())) - .is_ok() - { - return maxdims.iter().cloned().map(|x| x as 
_).collect(); - } - } - vec![] - } - - pub fn resizable(&self) -> bool { - self.maxdims().iter().any(|&x| x == H5S_UNLIMITED as _) - } -} - -impl Dimension for Dataspace { - fn ndim(&self) -> usize { - h5call!(H5Sget_simple_extent_ndims(self.id())).unwrap_or(0) as _ - } - - fn dims(&self) -> Vec { - let ndim = self.ndim(); - if ndim > 0 { - let mut dims: Vec = Vec::with_capacity(ndim); - unsafe { - dims.set_len(ndim); - } - if h5call!(H5Sget_simple_extent_dims(self.id(), dims.as_mut_ptr(), ptr::null_mut())) - .is_ok() - { - return dims.iter().cloned().map(|x| x as _).collect(); - } - } - vec![] - } -} - -#[cfg(test)] -pub mod tests { - use crate::internal_prelude::*; - - #[test] - pub fn test_dimension() { - fn f(d: D) -> (usize, Vec, Ix) { - (d.ndim(), d.dims(), d.size()) - } - - assert_eq!(f(()), (0, vec![], 1)); - assert_eq!(f(&()), (0, vec![], 1)); - assert_eq!(f(2), (1, vec![2], 2)); - assert_eq!(f(&3), (1, vec![3], 3)); - assert_eq!(f((4,)), (1, vec![4], 4)); - assert_eq!(f(&(5,)), (1, vec![5], 5)); - assert_eq!(f((1, 2)), (2, vec![1, 2], 2)); - assert_eq!(f(&(3, 4)), (2, vec![3, 4], 12)); - assert_eq!(f(vec![2, 3]), (2, vec![2, 3], 6)); - assert_eq!(f(&vec![4, 5]), (2, vec![4, 5], 20)); - } - - #[test] - pub fn test_debug() { - assert_eq!(format!("{:?}", Dataspace::try_new((), true).unwrap()), ""); - assert_eq!(format!("{:?}", Dataspace::try_new(3, true).unwrap()), ""); - assert_eq!( - format!("{:?}", Dataspace::try_new((1, 2), true).unwrap()), - "" - ); - } - - #[test] - pub fn test_dataspace() { - let _e = silence_errors(); - assert_err!( - Dataspace::try_new(H5S_UNLIMITED as Ix, true), - "current dimension must have a specific size" - ); - - let d = Dataspace::try_new((5, 6), true).unwrap(); - assert_eq!((d.ndim(), d.dims(), d.size()), (2, vec![5, 6], 30)); - - assert_eq!(Dataspace::try_new((), true).unwrap().dims(), vec![]); - - assert_err!(Dataspace::from_id(H5I_INVALID_HID), "Invalid dataspace id"); - - let dc = d.copy(); - assert!(dc.is_valid()); - 
assert_ne!(dc.id(), d.id()); - assert_eq!((d.ndim(), d.dims(), d.size()), (dc.ndim(), dc.dims(), dc.size())); - - assert_eq!(Dataspace::try_new((5, 6), false).unwrap().maxdims(), vec![5, 6]); - assert_eq!(Dataspace::try_new((5, 6), false).unwrap().resizable(), false); - assert_eq!( - Dataspace::try_new((5, 6), true).unwrap().maxdims(), - vec![H5S_UNLIMITED as _, H5S_UNLIMITED as _] - ); - assert_eq!(Dataspace::try_new((5, 6), true).unwrap().resizable(), true); - } -} diff --git a/src/lib.rs b/src/lib.rs index 6dd581dc8..ae96cce44 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,20 @@ +//! HDF5 for Rust. +//! +//! This crate provides thread-safe Rust bindings and high-level wrappers for the `HDF5` +//! library API. Some of the features include: +//! +//! - Thread-safety with non-threadsafe libhdf5 builds guaranteed via reentrant mutexes. +//! - Native representation of most HDF5 types, including variable-length strings and arrays. +//! - Derive-macro for automatic mapping of user structs and enums to `HDF5` types. +//! - Multi-dimensional array reading/writing interface via `ndarray`. +//! +//! Direct low-level bindings are also available and provided in the `hdf5-sys` crate. +//! +//! Requires `HDF5` library of version 1.8.4 or later. Newer versions will enable additional +//! features of the library. Such items are marked in the documentation with a version number +//! indicating the required version of `HDF5`. The `have-direct` and `have-parallel` features +//! also indicates `HDF5` functionality. 
+ #![cfg_attr(feature = "cargo-clippy", warn(clippy::pedantic))] #![cfg_attr(feature = "cargo-clippy", warn(clippy::nursery))] #![cfg_attr(feature = "cargo-clippy", warn(clippy::all))] @@ -16,21 +33,35 @@ #![cfg_attr(feature = "cargo-clippy", allow(clippy::must_use_candidate))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::wildcard_imports))] #![cfg_attr(feature = "cargo-clippy", allow(clippy::struct_excessive_bools))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::redundant_pub_crate))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::unnecessary_unwrap))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::unnecessary_wraps))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::upper_case_acronyms))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::missing_panics_doc))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::missing_const_for_fn))] +#![cfg_attr(feature = "cargo-clippy", allow(clippy::option_if_let_else))] #![cfg_attr(all(feature = "cargo-clippy", test), allow(clippy::cyclomatic_complexity))] #![cfg_attr(not(test), allow(dead_code))] +// To build docs locally: +// RUSTDOCFLAGS="--cfg docsrs" cargo +nightly doc --features blosc,lzf +#![cfg_attr(docsrs, feature(doc_cfg))] -#[cfg(all(feature = "mpio", not(h5_have_parallel)))] +#[cfg(all(feature = "mpio", not(feature = "have-parallel")))] compile_error!("Enabling \"mpio\" feature requires HDF5 library built with MPI support"); mod export { pub use crate::{ class::from_id, dim::{Dimension, Ix}, - error::{silence_errors, Error, Result}, - filters::Filters, + error::{silence_errors, Error, ErrorFrame, ErrorStack, ExpandedErrorStack, Result}, + hl::extents::{Extent, Extents, SimpleExtents}, + hl::selection::{Hyperslab, Selection, SliceOrIndex}, hl::{ - Container, Conversion, Dataset, DatasetBuilder, Dataspace, Datatype, File, FileBuilder, - Group, Location, Object, PropertyList, Reader, Writer, + Attribute, AttributeBuilder, AttributeBuilderData, AttributeBuilderEmpty, + 
AttributeBuilderEmptyShape, ByteReader, Container, Conversion, Dataset, DatasetBuilder, + DatasetBuilderData, DatasetBuilderEmpty, DatasetBuilderEmptyShape, Dataspace, Datatype, + File, FileBuilder, Group, LinkInfo, LinkType, Location, LocationInfo, LocationToken, + LocationType, Object, PropertyList, Reader, Writer, }, }; @@ -45,10 +76,11 @@ mod export { } pub mod dataset { - #[cfg(hdf5_1_10_5)] + #[cfg(feature = "1.10.5")] pub use crate::hl::dataset::ChunkInfo; pub use crate::hl::dataset::{Chunk, Dataset, DatasetBuilder}; pub use crate::hl::plist::dataset_access::*; + pub use crate::hl::plist::dataset_create::*; } pub mod datatype { @@ -62,20 +94,31 @@ mod export { } pub mod plist { - pub use crate::hl::plist::dataset_access::DatasetAccess; - pub use crate::hl::plist::file_access::FileAccess; - pub use crate::hl::plist::file_create::FileCreate; + pub use crate::hl::plist::dataset_access::{DatasetAccess, DatasetAccessBuilder}; + pub use crate::hl::plist::dataset_create::{DatasetCreate, DatasetCreateBuilder}; + pub use crate::hl::plist::file_access::{FileAccess, FileAccessBuilder}; + pub use crate::hl::plist::file_create::{FileCreate, FileCreateBuilder}; + pub use crate::hl::plist::link_create::{LinkCreate, LinkCreateBuilder}; pub use crate::hl::plist::{PropertyList, PropertyListClass}; pub mod dataset_access { pub use crate::hl::plist::dataset_access::*; } + pub mod dataset_create { + pub use crate::hl::plist::dataset_create::*; + } pub mod file_access { pub use crate::hl::plist::file_access::*; } pub mod file_create { pub use crate::hl::plist::file_create::*; } + pub mod link_create { + pub use crate::hl::plist::link_create::*; + } + } + pub mod filters { + pub use crate::hl::filters::*; } } @@ -88,7 +131,6 @@ mod class; mod dim; mod error; -mod filters; #[doc(hidden)] pub mod globals; mod handle; @@ -113,9 +155,9 @@ mod internal_prelude { pub use crate::{ class::ObjectClass, dim::Dimension, - error::{h5check, silence_errors}, + error::{h5check, H5ErrorCode}, 
export::*, - handle::{get_id_type, is_valid_user_id, Handle}, + handle::Handle, hl::plist::PropertyListClass, sync::sync, util::{ @@ -143,16 +185,16 @@ pub fn library_version() -> (u8, u8, u8) { /// Returns true if the HDF5 library is threadsafe. pub fn is_library_threadsafe() -> bool { - #[cfg(hdf5_1_8_16)] + #[cfg(feature = "1.8.16")] { use self::internal_prelude::hbool_t; use hdf5_sys::h5::H5is_library_threadsafe; let mut ts: hbool_t = 0; h5call!(H5is_library_threadsafe(&mut ts)).map(|_| ts > 0).unwrap_or(false) } - #[cfg(not(hdf5_1_8_16))] + #[cfg(not(feature = "1.8.16"))] { - cfg!(h5_have_threadsafe) + cfg!(feature = "threadsafe") } } diff --git a/src/macros.rs b/src/macros.rs index 88c379625..3f23fe972 100644 --- a/src/macros.rs +++ b/src/macros.rs @@ -4,7 +4,7 @@ use crate::internal_prelude::*; macro_rules! fail { ($err:expr) => ( - return Err(From::from($err)); + return Err(From::from($err)) ); ($fmt:expr, $($arg:tt)*) => ( @@ -44,7 +44,7 @@ macro_rules! assert_err { panic!("assertion failed: not an error in `{}`", stringify!($expr)); } Err(ref value) => { - let desc = value.description().to_string(); + let desc = value.to_string(); if !desc.contains($err) { panic!( "assertion failed: error message `{}` doesn't contain `{}` in `{}`", @@ -70,7 +70,7 @@ macro_rules! assert_err_re { Err(ref value) => { use regex::Regex; let re = Regex::new($err).unwrap(); - let desc = value.description().to_string(); + let desc = value.to_string(); if !re.is_match(desc.as_ref()) { panic!( "assertion failed: error message `{}` doesn't match `{}` in `{}`", diff --git a/src/sync.rs b/src/sync.rs index b817ba345..2d2446431 100644 --- a/src/sync.rs +++ b/src/sync.rs @@ -1,13 +1,33 @@ use lazy_static::lazy_static; use parking_lot::ReentrantMutex; +lazy_static! 
{ + pub(crate) static ref LIBRARY_INIT: () = { + // No functions called here must try to create the LOCK, + // as this could cause a deadlock in initialisation + unsafe { + // Ensure hdf5 does not invalidate handles which might + // still be live on other threads on program exit + ::hdf5_sys::h5::H5dont_atexit(); + ::hdf5_sys::h5::H5open(); + // Ignore errors on stdout + crate::error::silence_errors_no_sync(true); + // Register filters lzf/blosc if available + crate::hl::filters::register_filters(); + } + }; +} + /// Guards the execution of the provided closure with a recursive static mutex. pub fn sync(func: F) -> T where F: FnOnce() -> T, { lazy_static! { - static ref LOCK: ReentrantMutex<()> = ReentrantMutex::new(()); + static ref LOCK: ReentrantMutex<()> = { + lazy_static::initialize(&LIBRARY_INIT); + ReentrantMutex::new(()) + }; } let _guard = LOCK.lock(); func() @@ -32,4 +52,16 @@ mod tests { let g4 = LOCK.lock(); assert_eq!(*g4, ()); } + + #[test] + // Test for locking behaviour on initialisation + pub fn lock_part1() { + let _ = *crate::globals::H5P_ROOT; + } + + #[test] + // Test for locking behaviour on initialisation + pub fn lock_part2() { + let _ = h5call!(*crate::globals::H5P_ROOT); + } } diff --git a/src/test.rs b/src/test.rs index b17c2b25a..1424cf7bc 100644 --- a/src/test.rs +++ b/src/test.rs @@ -1,23 +1,22 @@ use std::path::PathBuf; -use tempdir::TempDir; +use tempfile::tempdir; use crate::internal_prelude::*; -pub fn with_tmp_dir(func: F) { - let dir = TempDir::new_in(".", "tmp").unwrap(); +pub fn with_tmp_dir T>(func: F) -> T { + let dir = tempdir().unwrap(); let path = dir.path().to_path_buf(); - let _e = silence_errors(); - func(path); + func(path) } -pub fn with_tmp_path(func: F) { +pub fn with_tmp_path T>(func: F) -> T { with_tmp_dir(|dir| func(dir.join("foo.h5"))) } -pub fn with_tmp_file(func: F) { +pub fn with_tmp_file T>(func: F) -> T { with_tmp_path(|path| { let file = File::create(&path).unwrap(); - func(file); + func(file) }) } diff 
--git a/src/util.rs b/src/util.rs index 3a943657e..51b78e426 100644 --- a/src/util.rs +++ b/src/util.rs @@ -1,11 +1,9 @@ use std::borrow::Borrow; +use std::convert::TryInto; use std::ffi::{CStr, CString}; use std::ptr; use std::str; -use num_integer::Integer; -use num_traits::{cast, NumCast}; - use crate::internal_prelude::*; /// Convert a zero-terminated string (`const char *`) into a `String`. @@ -16,6 +14,7 @@ pub fn string_from_cstr(string: *const c_char) -> String { /// Convert a `String` or a `&str` into a zero-terminated string (`const char *`). pub fn to_cstring>(string: S) -> Result { let string = string.borrow(); + #[allow(clippy::map_err_ignore)] CString::new(string).map_err(|_| format!("null byte in string: {:?}", string).into()) } @@ -34,20 +33,20 @@ pub fn string_to_fixed_bytes(s: &str, buf: &mut [c_char]) { } let bytes = s.as_bytes(); unsafe { - ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr() as *mut _, bytes.len()); + ptr::copy_nonoverlapping(bytes.as_ptr(), buf.as_mut_ptr().cast(), bytes.len()); } for c in &mut buf[bytes.len()..] 
{ *c = 0; } } -#[cfg(hdf5_1_8_13)] +#[cfg(feature = "1.8.13")] pub fn h5_free_memory(mem: *mut c_void) { use hdf5_sys::h5::H5free_memory; unsafe { H5free_memory(mem) }; } -#[cfg(not(hdf5_1_8_13))] +#[cfg(not(feature = "1.8.13"))] pub fn h5_free_memory(mem: *mut c_void) { // this may fail in debug builds of HDF5 use libc::free; @@ -58,9 +57,9 @@ pub fn h5_free_memory(mem: *mut c_void) { pub fn get_h5_str(func: F) -> Result where F: Fn(*mut c_char, size_t) -> T, - T: Integer + NumCast, + T: TryInto, { - let len = 1 + cast::(func(ptr::null_mut(), 0)).unwrap(); + let len = 1_isize + (func(ptr::null_mut(), 0)).try_into().unwrap_or(-1); ensure!(len > 0, "negative string length in get_h5_str()"); if len == 1 { Ok("".to_owned()) diff --git a/tests/common/gen.rs b/tests/common/gen.rs index 40afb069e..5f69a458f 100644 --- a/tests/common/gen.rs +++ b/tests/common/gen.rs @@ -1,54 +1,54 @@ +use std::convert::TryFrom; use std::fmt; use std::iter; use hdf5::types::{FixedAscii, FixedUnicode, VarLenArray, VarLenAscii, VarLenUnicode}; use hdf5::H5Type; -use hdf5_types::Array; -use ndarray::{ArrayD, SliceInfo, SliceOrIndex}; +use ndarray::{ArrayD, SliceInfo, SliceInfoElem}; use rand::distributions::{Alphanumeric, Uniform}; use rand::prelude::{Rng, SliceRandom}; pub fn gen_shape(rng: &mut R, ndim: usize) -> Vec { - iter::repeat(()).map(|_| rng.gen_range(0, 11)).take(ndim).collect() + iter::repeat(()).map(|_| rng.gen_range(0..11)).take(ndim).collect() } pub fn gen_ascii(rng: &mut R, len: usize) -> String { - iter::repeat(()).map(|_| rng.sample(Alphanumeric)).take(len).collect() + iter::repeat(()).map(|_| rng.sample(Alphanumeric)).map(char::from).take(len).collect() } /// Generate a random slice of elements inside the given `shape` dimension. 
pub fn gen_slice( rng: &mut R, shape: &[usize], -) -> SliceInfo, ndarray::IxDyn> { - let rand_slice: Vec = +) -> SliceInfo, ndarray::IxDyn, ndarray::IxDyn> { + let rand_slice: Vec = shape.into_iter().map(|s| gen_slice_one_dim(rng, *s)).collect(); - SliceInfo::new(rand_slice).unwrap() + SliceInfo::try_from(rand_slice).unwrap() } /// Generate a random 1D slice of the interval [0, shape). -fn gen_slice_one_dim(rng: &mut R, shape: usize) -> ndarray::SliceOrIndex { +fn gen_slice_one_dim(rng: &mut R, shape: usize) -> ndarray::SliceInfoElem { if shape == 0 { - return ndarray::SliceOrIndex::Slice { start: 0, end: None, step: 1 }; + return ndarray::SliceInfoElem::Slice { start: 0, end: None, step: 1 }; } if rng.gen_bool(0.1) { - ndarray::SliceOrIndex::Index(rng.gen_range(0, shape) as isize) + ndarray::SliceInfoElem::Index(rng.gen_range(0..shape) as isize) } else { - let start = rng.gen_range(0, shape) as isize; + let start = rng.gen_range(0..shape) as isize; let end = if rng.gen_bool(0.5) { None } else if rng.gen_bool(0.9) { - Some(rng.gen_range(start, shape as isize)) + Some(rng.gen_range(start..shape as isize)) } else { // Occasionally generate a slice with end < start. 
- Some(rng.gen_range(0, shape as isize)) + Some(rng.gen_range(0..shape as isize)) }; - let step = if rng.gen_bool(0.9) { 1isize } else { rng.gen_range(1, shape * 2) as isize }; + let step = if rng.gen_bool(0.9) { 1isize } else { rng.gen_range(1..shape * 2) as isize }; - ndarray::SliceOrIndex::Slice { start, end, step } + ndarray::SliceInfoElem::Slice { start, end, step } } } @@ -108,9 +108,9 @@ where ArrayD::from_shape_vec(shape, vec).unwrap() } -impl> Gen for FixedAscii { +impl Gen for FixedAscii { fn gen(rng: &mut R) -> Self { - let len = rng.sample(Uniform::new_inclusive(0, A::capacity())); + let len = rng.sample(Uniform::new_inclusive(0, N)); let dist = Uniform::new_inclusive(0, 127); let mut v = Vec::with_capacity(len); for _ in 0..len { @@ -120,9 +120,9 @@ impl> Gen for FixedAscii { } } -impl> Gen for FixedUnicode { +impl Gen for FixedUnicode { fn gen(rng: &mut R) -> Self { - let len = rng.sample(Uniform::new_inclusive(0, A::capacity())); + let len = rng.sample(Uniform::new_inclusive(0, N)); let mut s = String::new(); for _ in 0..len { let c = rng.gen::(); @@ -200,8 +200,8 @@ impl Gen for TupleStruct { #[derive(H5Type, Clone, Debug, PartialEq)] #[repr(C)] pub struct FixedStruct { - fa: FixedAscii<[u8; 3]>, - fu: FixedUnicode<[u8; 11]>, + fa: FixedAscii<3>, + fu: FixedUnicode<11>, tuple: (i8, u64, f32), array: [TupleStruct; 2], } @@ -230,3 +230,42 @@ impl Gen for VarLenStruct { VarLenStruct { va: Gen::gen(rng), vu: Gen::gen(rng), vla: Gen::gen(rng) } } } + +#[derive(H5Type, Clone, Debug, PartialEq)] +#[repr(C)] +pub struct RenameStruct { + first: i32, + #[hdf5(rename = "field.second")] + second: i64, +} + +impl Gen for RenameStruct { + fn gen(rng: &mut R) -> Self { + RenameStruct { first: Gen::gen(rng), second: Gen::gen(rng) } + } +} + +#[derive(H5Type, Clone, Copy, Debug, PartialEq)] +#[repr(C)] +pub struct RenameTupleStruct(#[hdf5(rename = "my_boolean")] bool, #[hdf5(rename = "my_enum")] Enum); + +impl Gen for RenameTupleStruct { + fn gen(rng: &mut R) -> 
Self { + RenameTupleStruct(Gen::gen(rng), Gen::gen(rng)) + } +} + +#[derive(H5Type, Clone, Copy, Debug, PartialEq)] +#[repr(i16)] +pub enum RenameEnum { + #[hdf5(rename = "coord.x")] + X = -2, + #[hdf5(rename = "coord.y")] + Y = 3, +} + +impl Gen for RenameEnum { + fn gen(rng: &mut R) -> Self { + *[RenameEnum::X, RenameEnum::Y].choose(rng).unwrap() + } +} diff --git a/tests/common/macros.rs b/tests/common/macros.rs index dcc45e7e4..ba618826d 100644 --- a/tests/common/macros.rs +++ b/tests/common/macros.rs @@ -6,7 +6,7 @@ macro_rules! assert_err { panic!("assertion failed: not an error in `{}`", stringify!($expr)); } Err(ref value) => { - let desc = value.description().to_string(); + let desc = value.to_string(); if !desc.contains($err) { panic!( "assertion failed: error message `{}` doesn't contain `{}` in `{}`", diff --git a/tests/test_dataset.rs b/tests/test_dataset.rs index dcf15dda7..4f02a95d4 100644 --- a/tests/test_dataset.rs +++ b/tests/test_dataset.rs @@ -1,13 +1,18 @@ +use std::convert::TryFrom; use std::fmt; +use std::io::{Read, Seek, SeekFrom}; use ndarray::{s, Array1, Array2, ArrayD, IxDyn, SliceInfo}; use rand::prelude::{Rng, SeedableRng, SmallRng}; -use hdf5_types::TypeDescriptor; +use hdf5_types::{H5Type, TypeDescriptor}; mod common; -use self::common::gen::{gen_arr, gen_slice, Enum, FixedStruct, Gen, TupleStruct, VarLenStruct}; +use self::common::gen::{ + gen_arr, gen_slice, Enum, FixedStruct, Gen, RenameEnum, RenameStruct, RenameTupleStruct, + TupleStruct, VarLenStruct, +}; use self::common::util::new_in_memory_file; fn test_write_slice( @@ -28,7 +33,7 @@ where // Write these elements into their 'correct' places in the matrix { let dsw = ds.as_writer(); - dsw.write_slice(&sliced_array_copy, &slice)?; + dsw.write_slice(&sliced_array_copy, slice.clone())?; } // Read back out the random from the full dataset @@ -60,7 +65,7 @@ where let slice = gen_slice(rng, shape); // Do a sliced HDF5 read - let sliced_read: ArrayD = dsr.read_slice(&slice).unwrap(); 
+ let sliced_read: ArrayD = dsr.read_slice(slice.clone()).unwrap(); // Slice the full dataset let sliced_dataset = arr.slice(slice.as_ref()); @@ -76,9 +81,10 @@ where let mut bad_shape = Vec::from(shape); bad_shape.push(1); let bad_slice = gen_slice(rng, &bad_shape); - let bad_slice: SliceInfo<_, IxDyn> = ndarray::SliceInfo::new(bad_slice.as_slice()).unwrap(); + let bad_slice: SliceInfo<_, IxDyn, IxDyn> = + ndarray::SliceInfo::try_from(bad_slice.as_slice()).unwrap(); - let bad_sliced_read: hdf5::Result> = dsr.read_slice(&bad_slice); + let bad_sliced_read: hdf5::Result> = dsr.read_slice(bad_slice); assert!(bad_sliced_read.is_err()); // Tests for dimension-dropping slices with static dimensionality. @@ -166,6 +172,85 @@ where Ok(()) } +fn test_byte_read_seek_impl(ds: &hdf5::Dataset, arr: &ArrayD, ndim: usize) -> hdf5::Result<()> { + let mut rng = SmallRng::seed_from_u64(42); + ds.write(arr)?; + + // Read whole + let reader = ds.as_byte_reader(); + let mut reader = if ndim != 1 { + assert!(reader.is_err()); + return Ok(()); + } else { + reader.unwrap() + }; + let mut out_bytes = vec![0u8; arr.len()]; + reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed"); + assert_eq!(out_bytes.as_slice(), arr.as_slice().unwrap()); + + // Read in chunks + let mut reader = reader.clone(); + reader.seek(std::io::SeekFrom::Start(0)).expect("io::Seek failed"); + let mut pos = 0; + while pos < arr.len() { + let chunk_len: usize = rng.gen_range(1..arr.len() + 1); + let mut chunk = vec![0u8; chunk_len]; + let n_read = reader.read(&mut chunk).expect("io::Read failed"); + if pos + chunk_len < arr.len() { + // We did not read until end. Thus, the chunk should be fully filled. 
+ assert_eq!(chunk_len, n_read); + } + assert_eq!(&chunk[..n_read], arr.slice(s![pos..pos + n_read]).as_slice().unwrap()); + pos += chunk_len; + } + + // Seek to the begining and read again + reader.seek(SeekFrom::Start(0)).expect("io::Seek failed"); + let mut out_bytes = vec![0u8; arr.len()]; + reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed"); + assert_eq!(out_bytes.as_slice(), arr.as_slice().unwrap()); + + // Seek to a random position from start + let pos = rng.gen_range(0..arr.len() + 1) as u64; + let seeked_pos = reader.seek(SeekFrom::Start(pos)).expect("io::Seek failed") as usize; + let mut out_bytes = vec![0u8; arr.len() - seeked_pos]; + reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed"); + assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap()); + + // Seek from current position + let orig_pos = reader.seek(SeekFrom::Start(pos)).expect("io::Seek failed") as i64; + let rel_pos = rng.gen_range(-(arr.len() as i64)..arr.len() as i64 + 1); + let pos_res = reader.seek(SeekFrom::Current(rel_pos)); + if (rel_pos + orig_pos) < 0 { + assert!(pos_res.is_err()) // We cannot seek before start + } else { + let seeked_pos = pos_res.unwrap() as usize; + assert_eq!(rel_pos + orig_pos, seeked_pos as i64); + let mut out_bytes = vec![0u8; arr.len() - seeked_pos]; + reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed"); + assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap()); + } + + // Seek to a random position from end + let pos = -(rng.gen_range(0..arr.len() + 1) as i64); + let seeked_pos = reader.seek(SeekFrom::End(pos)).expect("io::Seek failed") as usize; + assert_eq!(pos, seeked_pos as i64 - arr.len() as i64); + let mut out_bytes = vec![0u8; arr.len() - seeked_pos]; + reader.read(&mut out_bytes.as_mut_slice()).expect("io::Read failed"); + assert_eq!(out_bytes.as_slice(), arr.slice(s![seeked_pos..]).as_slice().unwrap()); + + // Seek before start + 
assert!(reader.seek(SeekFrom::End(-(arr.len() as i64) - 1)).is_err()); + + // Test stream position start + // Requires Rust 1.55.0: reader.rewind().expect("io::Seek::rewind failed"); + assert_eq!(0, reader.seek(SeekFrom::Start(0)).unwrap()); + assert_eq!(0, reader.stream_position().unwrap()); + assert_eq!(0, reader.seek(SeekFrom::End(-(arr.len() as i64))).unwrap()); + assert_eq!(0, reader.stream_position().unwrap()); + Ok(()) +} + fn test_read_write() -> hdf5::Result<()> where T: hdf5::H5Type + fmt::Debug + PartialEq + Gen + Clone, @@ -185,10 +270,8 @@ where for mode in 0..4 { let arr: ArrayD = gen_arr(&mut rng, ndim); - let ds: hdf5::Dataset = file - .new_dataset::() - .packed(*packed) - .create("x", arr.shape().to_vec())?; + let ds: hdf5::Dataset = + file.new_dataset::().packed(*packed).shape(arr.shape()).create("x")?; let ds = scopeguard::guard(ds, |ds| { drop(ds); drop(file.unlink("x")); @@ -257,3 +340,42 @@ fn test_read_write_tuples() -> hdf5::Result<()> { test_read_write::<(i8, u64, f32)>()?; Ok(()) } + +#[test] +fn test_create_on_databuilder() { + let file = new_in_memory_file().unwrap(); + + let _ds = file.new_dataset_builder().empty::().create("ds1").unwrap(); + let _ds = file.new_dataset_builder().with_data(&[1_i32, 2, 3]).create("ds2").unwrap(); + let _ds = file.new_dataset::().create("ds3").unwrap(); + let _ds = file.new_dataset::().shape(2).create("ds4").unwrap(); +} + +#[test] +fn test_read_write_rename_fields() -> hdf5::Result<()> { + test_read_write::()?; + test_read_write::()?; + test_read_write::()?; + Ok(()) +} + +#[test] +fn test_byte_read_seek() -> hdf5::Result<()> { + let mut rng = SmallRng::seed_from_u64(42); + let file = new_in_memory_file()?; + + for ndim in 0..=2 { + for _ in 0..=20 { + let arr: ArrayD = gen_arr(&mut rng, ndim); + + let ds: hdf5::Dataset = file.new_dataset::().shape(arr.shape()).create("x")?; + let ds = scopeguard::guard(ds, |ds| { + drop(ds); + drop(file.unlink("x")); + }); + + test_byte_read_seek_impl(&ds, &arr, ndim)?; 
+ } + } + Ok(()) +} diff --git a/tests/test_datatypes.rs b/tests/test_datatypes.rs index 2c41be5ae..8289ae1b4 100644 --- a/tests/test_datatypes.rs +++ b/tests/test_datatypes.rs @@ -31,8 +31,8 @@ pub fn test_datatype_roundtrip() { check_roundtrip!(bool, TD::Boolean); check_roundtrip!([bool; 5], TD::FixedArray(Box::new(TD::Boolean), 5)); check_roundtrip!(VarLenArray, TD::VarLenArray(Box::new(TD::Boolean))); - check_roundtrip!(FixedAscii<[_; 5]>, TD::FixedAscii(5)); - check_roundtrip!(FixedUnicode<[_; 5]>, TD::FixedUnicode(5)); + check_roundtrip!(FixedAscii<5>, TD::FixedAscii(5)); + check_roundtrip!(FixedUnicode<5>, TD::FixedUnicode(5)); check_roundtrip!(VarLenAscii, TD::VarLenAscii); check_roundtrip!(VarLenUnicode, TD::VarLenUnicode); @@ -42,7 +42,7 @@ pub fn test_datatype_roundtrip() { enum X { A = 1, B = -2, - }; + } let x_desc = TD::Enum(EnumType { size: IntSize::U8, signed: true, @@ -53,12 +53,30 @@ pub fn test_datatype_roundtrip() { }); check_roundtrip!(X, x_desc); + #[allow(dead_code)] + #[derive(H5Type)] + #[repr(i64)] + enum Y { + #[hdf5(rename = "variant.a")] + A = 1, + B = -2, + } + let y_desc = TD::Enum(EnumType { + size: IntSize::U8, + signed: true, + members: vec![ + EnumMember { name: "variant.a".into(), value: 1 }, + EnumMember { name: "B".into(), value: -2i64 as _ }, + ], + }); + check_roundtrip!(Y, y_desc); + #[derive(H5Type)] #[repr(C)] struct A { a: i64, b: u64, - }; + } let a_desc = TD::Compound(CompoundType { fields: vec![ CompoundField::typed::("a", 0, 0), @@ -73,7 +91,7 @@ pub fn test_datatype_roundtrip() { struct C { a: [X; 2], b: [[A; 4]; 32], - }; + } let a_arr_desc = TD::FixedArray(Box::new(x_desc), 2); let b_arr_desc = TD::FixedArray(Box::new(TD::FixedArray(Box::new(a_desc), 4)), 32); let c_desc = TD::Compound(CompoundType { @@ -84,11 +102,40 @@ pub fn test_datatype_roundtrip() { size: 2 * 8 + 4 * 32 * 16, }); check_roundtrip!(C, c_desc); + + #[derive(H5Type)] + #[repr(C)] + struct D { + #[hdf5(rename = "field.one")] + a: f64, + 
#[hdf5(rename = "field.two")] + b: u64, + } + let d_desc = TD::Compound(CompoundType { + fields: vec![ + CompoundField::typed::("field.one", 0, 0), + CompoundField::typed::("field.two", 8, 1), + ], + size: 16, + }); + check_roundtrip!(D, d_desc); + + #[derive(H5Type)] + #[repr(C)] + struct E(#[hdf5(rename = "alpha")] u64, f64); + let e_desc = TD::Compound(CompoundType { + fields: vec![ + CompoundField::typed::("alpha", 0, 0), + CompoundField::typed::("1", 8, 1), + ], + size: 16, + }); + check_roundtrip!(E, e_desc); } #[test] pub fn test_invalid_datatype() { - assert_err!(from_id::(H5I_INVALID_HID), "Invalid datatype id"); + assert_err!(from_id::(H5I_INVALID_HID), "Invalid handle id"); } #[test] diff --git a/tests/test_plist.rs b/tests/test_plist.rs index 26e9be755..fdec433e0 100644 --- a/tests/test_plist.rs +++ b/tests/test_plist.rs @@ -1,7 +1,5 @@ -#[macro_use] -extern crate mashup; - use std::mem; +use std::str::FromStr; use hdf5::dataset::*; use hdf5::file::*; @@ -14,11 +12,10 @@ macro_rules! test_pl { ($ty:ident, $field:ident ($($arg:expr,)+): $($name:ident=$value:expr,)+) => ({ let mut b = $ty::build(); - mashup! { m["get" $field] = get_ $field; } b.$field($($arg,)+); let fapl = b.finish()?; $(assert_eq!(fapl.$field().$name, $value);)+ - m! { $(assert_eq!(fapl."get" $field()?.$name, $value);)+ } + paste::paste! { $(assert_eq!(fapl.[]()?.$name, $value);)+ } }); ($ty:ident, $field:ident: $($name:ident=$value:expr),+) => ( @@ -31,11 +28,10 @@ macro_rules! test_pl { ($ty:ident, $field:ident ($arg:expr): $value:expr) => ({ let mut b = $ty::build(); - mashup! { m["get" $field] = get_ $field; } b.$field($arg); let fapl = b.finish()?; assert_eq!(fapl.$field(), $value); - m! { assert_eq!(fapl."get" $field()?, $value); } + paste::paste! 
{ assert_eq!(fapl.[]()?, $value); } }); ($ty:ident, $field:ident: $value:expr) => ({ @@ -132,7 +128,39 @@ fn test_fcpl_set_shared_mesg_indexes() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_1)] +fn test_fcpl_obj_track_times() -> hdf5::Result<()> { + assert_eq!(FC::try_new()?.get_obj_track_times()?, true); + assert_eq!(FC::try_new()?.obj_track_times(), true); + test_pl!(FC, obj_track_times: true); + test_pl!(FC, obj_track_times: false); + Ok(()) +} + +#[test] +fn test_fcpl_attr_phase_change() -> hdf5::Result<()> { + assert_eq!(FC::try_new()?.get_attr_phase_change()?, AttrPhaseChange::default()); + assert_eq!(FC::try_new()?.attr_phase_change(), AttrPhaseChange::default()); + let pl = FCB::new().attr_phase_change(34, 21).finish()?; + let expected = AttrPhaseChange { max_compact: 34, min_dense: 21 }; + assert_eq!(pl.get_attr_phase_change()?, expected); + assert_eq!(pl.attr_phase_change(), expected); + assert_eq!(FCB::from_plist(&pl)?.finish()?.get_attr_phase_change()?, expected); + assert!(FCB::new().attr_phase_change(12, 34).finish().is_err()); + Ok(()) +} + +#[test] +fn test_fcpl_attr_creation_order() -> hdf5::Result<()> { + assert_eq!(FC::try_new()?.get_attr_creation_order()?.bits(), 0); + assert_eq!(FC::try_new()?.attr_creation_order().bits(), 0); + test_pl!(FC, attr_creation_order: AttrCreationOrder::TRACKED); + test_pl!(FC, attr_creation_order: AttrCreationOrder::TRACKED | AttrCreationOrder::INDEXED); + assert!(FCB::new().attr_creation_order(AttrCreationOrder::INDEXED).finish().is_err()); + Ok(()) +} + +#[test] +#[cfg(feature = "1.10.1")] fn test_fcpl_set_file_space_page_size() -> hdf5::Result<()> { test_pl!(FC, file_space_page_size: 512); test_pl!(FC, file_space_page_size: 999); @@ -140,7 +168,7 @@ fn test_fcpl_set_file_space_page_size() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] fn test_fcpl_set_file_space_strategy() -> hdf5::Result<()> { test_pl!(FC, file_space_strategy: FileSpaceStrategy::PageAggregation); 
test_pl!(FC, file_space_strategy: FileSpaceStrategy::None); @@ -195,16 +223,16 @@ fn test_fapl_driver_core() -> hdf5::Result<()> { let d = check_matches!(b.finish()?.get_driver()?, d, FileDriver::Core(d)); assert_eq!(d.increment, 1024 * 1024); assert_eq!(d.filebacked, false); - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] assert_eq!(d.write_tracking, 0); b.core_options(123, true); - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] b.write_tracking(456); let d = check_matches!(b.finish()?.get_driver()?, d, FileDriver::Core(d)); assert_eq!(d.increment, 123); assert_eq!(d.filebacked, true); - #[cfg(hdf5_1_8_13)] + #[cfg(feature = "1.8.13")] assert_eq!(d.write_tracking, 456); b.core_filebacked(false); @@ -309,7 +337,7 @@ fn test_fapl_driver_mpio() -> hdf5::Result<()> { } #[test] -#[cfg(h5_have_direct)] +#[cfg(feature = "have-direct")] fn test_fapl_driver_direct() -> hdf5::Result<()> { let mut b = FileAccess::build(); @@ -451,7 +479,7 @@ fn test_fapl_set_mdc_config() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_8_7)] +#[cfg(feature = "1.8.7")] fn test_fapl_set_elink_file_cache_size() -> hdf5::Result<()> { test_pl!(FA, elink_file_cache_size: 0); test_pl!(FA, elink_file_cache_size: 17); @@ -459,7 +487,7 @@ fn test_fapl_set_elink_file_cache_size() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] fn test_fapl_set_metadata_read_attempts() -> hdf5::Result<()> { test_pl!(FA, metadata_read_attempts: 1); test_pl!(FA, metadata_read_attempts: 17); @@ -467,7 +495,7 @@ fn test_fapl_set_metadata_read_attempts() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] fn test_fapl_set_mdc_log_options() -> hdf5::Result<()> { test_pl!(FA, mdc_log_options: is_enabled = true, location = "abc", start_on_access = false,); test_pl!(FA, mdc_log_options: is_enabled = false, location = "", start_on_access = true,); @@ -475,7 +503,7 @@ fn test_fapl_set_mdc_log_options() -> hdf5::Result<()> { } #[test] -#[cfg(all(hdf5_1_10_0, feature = 
"mpio"))] +#[cfg(all(feature = "1.10.0", feature = "mpio"))] fn test_fapl_set_all_coll_metadata_ops() -> hdf5::Result<()> { test_pl!(FA, all_coll_metadata_ops: true); test_pl!(FA, all_coll_metadata_ops: false); @@ -483,7 +511,7 @@ fn test_fapl_set_all_coll_metadata_ops() -> hdf5::Result<()> { } #[test] -#[cfg(all(hdf5_1_10_0, feature = "mpio"))] +#[cfg(all(feature = "1.10.0", feature = "mpio"))] fn test_fapl_set_coll_metadata_write() -> hdf5::Result<()> { test_pl!(FA, coll_metadata_write: true); test_pl!(FA, coll_metadata_write: false); @@ -491,18 +519,32 @@ fn test_fapl_set_coll_metadata_write() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_2)] +#[cfg(feature = "1.10.2")] fn test_fapl_set_libver_bounds() -> hdf5::Result<()> { test_pl!(FA, libver_bounds: low = LibraryVersion::Earliest, high = LibraryVersion::V18); test_pl!(FA, libver_bounds: low = LibraryVersion::Earliest, high = LibraryVersion::V110); test_pl!(FA, libver_bounds: low = LibraryVersion::V18, high = LibraryVersion::V18); test_pl!(FA, libver_bounds: low = LibraryVersion::V18, high = LibraryVersion::V110); test_pl!(FA, libver_bounds: low = LibraryVersion::V110, high = LibraryVersion::V110); + let make_lvb = |lv| LibVerBounds { low: lv, high: LibraryVersion::latest() }; + let mut b = FAB::new(); + b.libver_earliest(); + assert_eq!(b.finish()?.libver_bounds(), make_lvb(LibraryVersion::Earliest)); + assert_eq!(b.finish()?.libver(), LibraryVersion::Earliest); + b.libver_v18(); + assert_eq!(b.finish()?.libver_bounds(), make_lvb(LibraryVersion::V18)); + assert_eq!(b.finish()?.libver(), LibraryVersion::V18); + b.libver_v110(); + assert_eq!(b.finish()?.libver_bounds(), make_lvb(LibraryVersion::V110)); + assert_eq!(b.finish()?.libver(), LibraryVersion::V110); + b.libver_latest(); + assert_eq!(b.finish()?.libver_bounds(), make_lvb(LibraryVersion::latest())); + assert_eq!(b.finish()?.libver(), LibraryVersion::latest()); Ok(()) } #[test] -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] fn 
test_fapl_set_page_buffer_size() -> hdf5::Result<()> { test_pl!(FA, page_buffer_size: buf_size = 0, min_meta_perc = 0, min_raw_perc = 0); test_pl!(FA, page_buffer_size: buf_size = 0, min_meta_perc = 7, min_raw_perc = 9); @@ -511,7 +553,7 @@ fn test_fapl_set_page_buffer_size() -> hdf5::Result<()> { } #[test] -#[cfg(all(hdf5_1_10_1, not(h5_have_parallel)))] +#[cfg(all(feature = "1.10.1", not(feature = "have-parallel")))] fn test_fapl_set_evict_on_close() -> hdf5::Result<()> { test_pl!(FA, evict_on_close: true); test_pl!(FA, evict_on_close: false); @@ -519,7 +561,7 @@ fn test_fapl_set_evict_on_close() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_1)] +#[cfg(feature = "1.10.1")] fn test_fapl_set_mdc_image_config() -> hdf5::Result<()> { test_pl!(FA, mdc_image_config: generate_image = true); test_pl!(FA, mdc_image_config: generate_image = false); @@ -538,7 +580,7 @@ fn test_dapl_common() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_8_17)] +#[cfg(feature = "1.8.17")] fn test_dapl_set_efile_prefix() -> hdf5::Result<()> { assert_eq!(DA::try_new()?.get_efile_prefix().unwrap(), "".to_owned()); assert_eq!(DA::try_new()?.efile_prefix(), "".to_owned()); @@ -557,7 +599,7 @@ fn test_dapl_set_chunk_cache() -> hdf5::Result<()> { } #[test] -#[cfg(all(hdf5_1_10_0, feature = "mpio"))] +#[cfg(all(feature = "1.10.0", feature = "mpio"))] fn test_dapl_set_all_coll_metadata_ops() -> hdf5::Result<()> { test_pl!(DA, all_coll_metadata_ops: true); test_pl!(DA, all_coll_metadata_ops: false); @@ -565,7 +607,7 @@ fn test_dapl_set_all_coll_metadata_ops() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] fn test_dapl_set_virtual_view() -> hdf5::Result<()> { test_pl!(DA, virtual_view: VirtualView::FirstMissing); test_pl!(DA, virtual_view: VirtualView::LastAvailable); @@ -573,9 +615,320 @@ fn test_dapl_set_virtual_view() -> hdf5::Result<()> { } #[test] -#[cfg(hdf5_1_10_0)] +#[cfg(feature = "1.10.0")] fn test_dapl_set_virtual_printf_gap() -> hdf5::Result<()> { 
test_pl!(DA, virtual_printf_gap: 0); test_pl!(DA, virtual_printf_gap: 123); Ok(()) } + +type DC = DatasetCreate; +type DCB = DatasetCreateBuilder; + +#[test] +fn test_dcpl_common() -> hdf5::Result<()> { + test_pl_common!(DC, PropertyListClass::DatasetCreate, |b: &mut DCB| b + .layout(Layout::Compact) + .finish()); + Ok(()) +} + +#[test] +fn test_dcpl_set_chunk() -> hdf5::Result<()> { + assert!(DC::try_new()?.get_chunk()?.is_none()); + assert_eq!(DCB::new().chunk(&[3, 7]).finish()?.get_chunk()?, Some(vec![3, 7])); + assert_eq!(DCB::new().chunk((3, 7)).finish()?.chunk(), Some(vec![3, 7])); + let mut b = DCB::new().chunk([3, 7]).clone(); + assert_eq!(b.layout(Layout::Contiguous).finish()?.layout(), Layout::Chunked); + assert_eq!(b.layout(Layout::Compact).finish()?.layout(), Layout::Chunked); + #[cfg(feature = "1.10.0")] + assert_eq!(b.layout(Layout::Virtual).finish()?.layout(), Layout::Chunked); + assert!(b.no_chunk().finish()?.chunk().is_none()); + assert!(DCB::new().layout(Layout::Contiguous).finish()?.get_chunk()?.is_none()); + assert!(DCB::new().layout(Layout::Compact).finish()?.get_chunk()?.is_none()); + #[cfg(feature = "1.10.0")] + assert!(DCB::new().layout(Layout::Virtual).finish()?.get_chunk()?.is_none()); + assert_eq!(DCB::new().layout(Layout::Chunked).finish()?.get_chunk()?, Some(vec![])); + Ok(()) +} + +#[test] +fn test_dcpl_set_layout() -> hdf5::Result<()> { + check_matches!(DC::try_new()?.get_layout()?, (), Layout::Contiguous); + test_pl!(DC, layout: Layout::Contiguous); + test_pl!(DC, layout: Layout::Compact); + test_pl!(DC, layout: Layout::Chunked); + #[cfg(feature = "1.10.0")] + test_pl!(DC, layout: Layout::Virtual); + Ok(()) +} + +#[cfg(feature = "1.10.0")] +#[test] +fn test_dcpl_set_chunk_opts() -> hdf5::Result<()> { + assert!(DC::try_new()?.get_chunk_opts()?.is_none()); + let mut b = DCB::new(); + assert!(b.layout(Layout::Contiguous).finish()?.get_chunk_opts()?.is_none()); + assert!(b.layout(Layout::Compact).finish()?.get_chunk_opts()?.is_none()); + 
#[cfg(feature = "1.10.0")] + assert!(b.layout(Layout::Virtual).finish()?.get_chunk_opts()?.is_none()); + b.layout(Layout::Chunked); + assert_eq!(b.finish()?.get_chunk_opts()?, Some(ChunkOpts::empty())); + b.chunk_opts(ChunkOpts::empty()); + assert_eq!(b.finish()?.get_chunk_opts()?, Some(ChunkOpts::empty())); + b.chunk_opts(ChunkOpts::DONT_FILTER_PARTIAL_CHUNKS); + assert_eq!(b.finish()?.get_chunk_opts()?, Some(ChunkOpts::DONT_FILTER_PARTIAL_CHUNKS)); + Ok(()) +} + +#[test] +fn test_dcpl_set_alloc_time() -> hdf5::Result<()> { + check_matches!(DC::try_new()?.get_alloc_time()?, (), AllocTime::Late); + let mut b = DCB::new(); + b.alloc_time(None); + b.layout(Layout::Contiguous); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Late); + b.layout(Layout::Compact); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Early); + b.layout(Layout::Chunked); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Incr); + #[cfg(feature = "1.10.0")] + { + b.layout(Layout::Virtual); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Incr); + } + b.layout(Layout::Contiguous); + b.alloc_time(Some(AllocTime::Late)); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Late); + b.alloc_time(Some(AllocTime::Incr)); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Incr); + b.alloc_time(Some(AllocTime::Early)); + check_matches!(b.finish()?.get_alloc_time()?, (), AllocTime::Early); + Ok(()) +} + +#[test] +fn test_dcpl_fill_time() -> hdf5::Result<()> { + check_matches!(DC::try_new()?.get_fill_time()?, (), FillTime::IfSet); + check_matches!(DC::try_new()?.fill_time(), (), FillTime::IfSet); + test_pl!(DC, fill_time: FillTime::IfSet); + test_pl!(DC, fill_time: FillTime::Alloc); + test_pl!(DC, fill_time: FillTime::Never); + Ok(()) +} + +#[test] +fn test_dcpl_fill_value() -> hdf5::Result<()> { + use hdf5_derive::H5Type; + use hdf5_types::{FixedAscii, FixedUnicode, VarLenArray, VarLenAscii, VarLenUnicode}; + + 
check_matches!(DC::try_new()?.get_fill_value_defined()?, (), FillValue::Default); + check_matches!(DC::try_new()?.fill_value_defined(), (), FillValue::Default); + assert_eq!(DC::try_new()?.get_fill_value_as::<f64>()?, Some(0.0)); + assert_eq!(DC::try_new()?.fill_value_as::<bool>(), Some(false)); + + let mut b = DCB::new(); + b.fill_value(1.23); + let pl = b.finish()?; + assert_eq!(pl.fill_value_defined(), FillValue::UserDefined); + assert_eq!(pl.fill_value_as::<f64>(), Some(1.23)); + assert_eq!(pl.fill_value_as::<i16>(), Some(1)); + assert!(pl.get_fill_value_as::<bool>().is_err()); + + #[derive(H5Type, Clone, Debug, PartialEq, Eq)] + #[repr(C)] + struct Data { + a: FixedAscii<5>, + b: FixedUnicode<5>, + c: [i16; 2], + d: VarLenAscii, + e: VarLenUnicode, + f: VarLenArray<bool>, + } + + let data = Data { + a: FixedAscii::from_ascii(b"12345").unwrap(), + b: FixedUnicode::from_str("abcd").unwrap(), + c: [123i16, -1i16], + d: VarLenAscii::from_ascii(b"xy").unwrap(), + e: VarLenUnicode::from_str("pqrst").unwrap(), + f: VarLenArray::from_slice([true, false].as_ref()), + }; + b.fill_value(data.clone()); + assert_eq!(b.finish()?.fill_value_defined(), FillValue::UserDefined); + assert_eq!(b.finish()?.fill_value_as::<Data>(), Some(data)); + assert!(b.finish()?.get_fill_value_as::<f64>().is_err()); + + Ok(()) +} + +#[test] +fn test_dcpl_external() -> hdf5::Result<()> { + assert_eq!(DC::try_new()?.get_external()?, vec![]); + let pl = DCB::new() + .external("bar", 0, 1) + .external("baz", 34, 100) + .external("foo", 12, 0) + .finish()?; + let expected = vec![ + ExternalFile { name: "bar".to_owned(), offset: 0, size: 1 }, + ExternalFile { name: "baz".to_owned(), offset: 34, size: 100 }, + ExternalFile { name: "foo".to_owned(), offset: 12, size: 0 }, + ]; + assert_eq!(pl.get_external()?, expected); + assert_eq!(pl.external(), expected); + assert_eq!(DCB::from_plist(&pl)?.finish()?.get_external()?, expected); + assert!(DCB::new().external("a", 1, 0).external("b", 1, 2).finish().is_err()); + Ok(()) +} + +#[cfg(feature = 
"1.10.0")] +#[test] +fn test_dcpl_virtual_map() -> hdf5::Result<()> { + use hdf5::Hyperslab; + use ndarray::s; + + let pl = DC::try_new()?; + assert!(pl.get_virtual_map().is_err()); + assert_eq!(pl.virtual_map(), vec![]); + + let pl = DCB::new().layout(Layout::Virtual).finish()?; + assert_eq!(pl.get_virtual_map()?, vec![]); + assert_eq!(pl.virtual_map(), vec![]); + + let pl = DCB::new() + .layout(Layout::Virtual) + .virtual_map("foo", "bar", (3, 4..), (.., 1..), (10..=20, 10), (..3, 7..)) + .virtual_map("x", "y", 100, 96.., 12, Hyperslab::try_new(s![2..;3])?) + .finish() + .unwrap(); + let expected = vec![ + VirtualMapping { + src_filename: "foo".into(), + src_dataset: "bar".into(), + src_extents: (3, 4..).into(), + src_selection: (..3, 1..4).into(), + vds_extents: (10..=20, 10).into(), + vds_selection: (..3, 7..10).into(), + }, + VirtualMapping { + src_filename: "x".into(), + src_dataset: "y".into(), + src_extents: 100.into(), + src_selection: (96..100).into(), + vds_extents: 12.into(), + vds_selection: Hyperslab::try_new(s![2..12;3])?.into(), + }, + ]; + assert_eq!(pl.get_virtual_map()?, expected); + assert_eq!(pl.virtual_map(), expected); + + assert_eq!(DCB::from_plist(&pl)?.finish()?.get_virtual_map()?, expected); + + let mut b = DCB::new() + .virtual_map("foo", "bar", (3, 4..), (.., 1..), (10..=20, 10), (..3, 7..)) + .clone(); + + // layout is set to virtual if virtual map is given + assert_eq!(b.layout(Layout::Contiguous).finish()?.layout(), Layout::Virtual); + assert_eq!(b.layout(Layout::Compact).finish()?.layout(), Layout::Virtual); + assert_eq!(b.layout(Layout::Chunked).finish()?.layout(), Layout::Virtual); + + // chunks are ignored in virtual mode + assert_eq!(b.chunk((1, 2, 3, 4)).finish()?.layout(), Layout::Virtual); + assert_eq!(b.chunk((1, 2, 3, 4)).finish()?.chunk(), None); + + Ok(()) +} + +#[test] +fn test_dcpl_obj_track_times() -> hdf5::Result<()> { + assert_eq!(DC::try_new()?.get_obj_track_times()?, true); + 
assert_eq!(DC::try_new()?.obj_track_times(), true); + test_pl!(DC, obj_track_times: true); + test_pl!(DC, obj_track_times: false); + Ok(()) +} + +#[test] +fn test_dcpl_attr_phase_change() -> hdf5::Result<()> { + assert_eq!(DC::try_new()?.get_attr_phase_change()?, AttrPhaseChange::default()); + assert_eq!(DC::try_new()?.attr_phase_change(), AttrPhaseChange::default()); + let pl = DCB::new().attr_phase_change(34, 21).finish()?; + let expected = AttrPhaseChange { max_compact: 34, min_dense: 21 }; + assert_eq!(pl.get_attr_phase_change()?, expected); + assert_eq!(pl.attr_phase_change(), expected); + assert_eq!(DCB::from_plist(&pl)?.finish()?.get_attr_phase_change()?, expected); + assert!(DCB::new().attr_phase_change(12, 34).finish().is_err()); + Ok(()) +} + +#[test] +fn test_dcpl_attr_creation_order() -> hdf5::Result<()> { + assert_eq!(DC::try_new()?.get_attr_creation_order()?.bits(), 0); + assert_eq!(DC::try_new()?.attr_creation_order().bits(), 0); + test_pl!(DC, attr_creation_order: AttrCreationOrder::TRACKED); + test_pl!(DC, attr_creation_order: AttrCreationOrder::TRACKED | AttrCreationOrder::INDEXED); + assert!(DCB::new().attr_creation_order(AttrCreationOrder::INDEXED).finish().is_err()); + Ok(()) +} + +type LC = LinkCreate; +type LCB = LinkCreateBuilder; + +#[test] +fn test_lcpl_common() -> hdf5::Result<()> { + test_pl_common!(LC, PropertyListClass::LinkCreate, |b: &mut LCB| b + .create_intermediate_group(true) + .finish()); + Ok(()) +} + +#[test] +fn test_lcpl_create_intermediate_group() -> hdf5::Result<()> { + assert_eq!(LC::try_new()?.get_create_intermediate_group()?, false); + assert_eq!( + LCB::new().create_intermediate_group(false).finish()?.get_create_intermediate_group()?, + false + ); + assert_eq!( + LCB::new().create_intermediate_group(false).finish()?.create_intermediate_group(), + false + ); + assert_eq!( + LCB::new().create_intermediate_group(true).finish()?.get_create_intermediate_group()?, + true + ); + assert_eq!( + 
LCB::new().create_intermediate_group(true).finish()?.create_intermediate_group(), + true + ); + let pl = LCB::new().create_intermediate_group(true).finish()?; + assert_eq!(LCB::from_plist(&pl)?.finish()?.get_create_intermediate_group()?, true); + Ok(()) +} + +#[test] +fn test_lcpl_char_encoding() -> hdf5::Result<()> { + use hdf5::plist::link_create::CharEncoding; + assert_eq!(LC::try_new()?.get_char_encoding()?, CharEncoding::Ascii); + assert_eq!( + LCB::new().char_encoding(CharEncoding::Ascii).finish()?.get_char_encoding()?, + CharEncoding::Ascii + ); + assert_eq!( + LCB::new().char_encoding(CharEncoding::Ascii).finish()?.char_encoding(), + CharEncoding::Ascii + ); + assert_eq!( + LCB::new().char_encoding(CharEncoding::Utf8).finish()?.get_char_encoding()?, + CharEncoding::Utf8 + ); + assert_eq!( + LCB::new().char_encoding(CharEncoding::Utf8).finish()?.char_encoding(), + CharEncoding::Utf8 + ); + let pl = LCB::new().char_encoding(CharEncoding::Utf8).finish()?; + assert_eq!(LCB::from_plist(&pl)?.finish()?.get_char_encoding()?, CharEncoding::Utf8); + Ok(()) +} diff --git a/tests/tests.rs b/tests/tests.rs new file mode 100644 index 000000000..f7bae99b9 --- /dev/null +++ b/tests/tests.rs @@ -0,0 +1,14 @@ +#[test] +fn roundtrip_compound_type() { + use hdf5::H5Type; + #[derive(H5Type)] + #[repr(C)] + struct Compound { + a: u8, + b: u8, + } + + let dt = hdf5::Datatype::from_type::<Compound>().unwrap(); + let td = dt.to_descriptor().unwrap(); + assert_eq!(td, Compound::type_descriptor()); +}