Libraries (Hunter)
On Hunter it is strongly recommended to use optimized libraries whenever possible.
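Which optimized libraries and versions are actually available can be checked through the module system. The following is only a minimal sketch, assuming the usual module commands of the Cray Programming Environment; the exact filter syntax depends on the module system in use:

module avail         # list all modules provided on the system
module avail cray-   # restrict the listing to Cray-provided modules (prefix match)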
Numerical Libraries
TBD
I/O Libraries
For parallel I/O, three libraries are provided along with the Cray Programming Environment:
- Cray HDF5
- Cray NetCDF
- Cray Parallel NetCDF
HDF5
On Hunter, the Cray HDF5 version of the Hierarchical Data Format (HDF) library is installed. The latest release notes can be found at https://cpe.ext.hpe.com/docs/latest/csml/cray_hdf5.html.
To use the HDF5 library and the corresponding tools, load the following module:
module load cray-hdf5-parallel
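To check that the module is loaded and where the installation lives, one can inspect the environment it sets. This is only a sketch; it assumes the module exports HDF5_ROOT, the variable used in the build script below:

module list          # cray-hdf5-parallel should appear in the list
echo ${HDF5_ROOT}    # root directory of the HDF5 installation provided by the module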
Make sure the include and library paths are provided to the compiler and linker. Within the Cray Programming Environment this can be done by passing the -I, -L and -l options on the command line.
For Fortran use:
<compiler_wrapper> -L${HDF5_ROOT}/lib/ -I${HDF5_ROOT}/include -lhdf5 -lhdf5_fortran <your_sources>
Fortran Example
Here we provide a very simple example of the usage of parallel HDF5 in Fortran.
program hdf5_io

  use mpi
  use hdf5
  use, intrinsic :: iso_c_binding

  implicit none

  ! Init kind parameters ------------------------------------------------------
  Integer, Parameter :: ik=4, rk=8

  ! Some data -----------------------------------------------------------------
  Real(kind=rk), Target :: Data_out(4, 8)

  ! Variables for mpi ---------------------------------------------------------
  Integer(kind=ik) :: mpiError, rank

  ! Variables for hdf5 --------------------------------------------------------
  integer(kind=ik)   :: h5error
  integer(HID_T)     :: plist_id
  integer(kind=4)    :: info = MPI_INFO_NULL
  integer(HID_T)     :: file_id
  integer(kind=ik)   :: dataset_rank = 2
  integer(HID_T)     :: filespace
  integer(HID_T)     :: memspace
  integer(HID_T)     :: dset_id
  character(len=64)  :: dataset_name = 'TestDataSet'
  type(C_PTR)        :: buffer
  integer(HID_T)     :: dsetparams

  ! Global and local extents of the 2D dataset and the per-rank offset --------
  integer(HSIZE_T)   :: GlobalGridPoints(2) = [16_HSIZE_T, 8_HSIZE_T]
  integer(HSIZE_T)   :: LocalGridPoints(2)  = [ 4_HSIZE_T, 8_HSIZE_T]
  integer(HSIZE_T)   :: Offset(2)

  ! MPI init ------------------------------------------------------------------
  Call mpi_init(mpiError)
  Call mpi_comm_rank(MPI_COMM_WORLD, rank, mpiError)

  ! Tell who's there and init data --------------------------------------------
  Write(*,*)"Rank",rank,"hdf5_io ... "
  data_out = Real(rank)*0.5_rk

  ! Each rank writes a 4 x 8 chunk at offset rank*4 in x-direction ------------
  Offset = [ int(rank,HSIZE_T)*4_HSIZE_T, 0_HSIZE_T ]

  ! Init hdf5 -----------------------------------------------------------------
  call h5open_f(h5error)

  ! Set hdf5 properties -------------------------------------------------------
  call h5pcreate_f(H5P_FILE_ACCESS_F, plist_id, h5error)
  call h5pset_fapl_mpio_f(plist_id, MPI_COMM_WORLD, info, h5error)

  ! Create hdf5 file ----------------------------------------------------------
  call h5fcreate_f("hdf5_io.h5", H5F_ACC_TRUNC_F, file_id, h5error, access_prp=plist_id)
  call h5pclose_f(plist_id, h5error)

  ! Create hdf5 dataspace and dataset -----------------------------------------
  call h5pcreate_f(H5P_DATASET_CREATE_F, dsetparams, h5error)
  call h5screate_simple_f(dataset_rank, GlobalGridPoints, filespace, h5error)
  call h5dcreate_f(file_id, trim(dataset_name), H5T_NATIVE_DOUBLE, filespace, dset_id, h5error, dsetparams)
  call h5sclose_f(filespace, h5error)

  call h5screate_simple_f(dataset_rank, LocalGridPoints, memspace, h5error)
  call h5dget_space_f(dset_id, filespace, h5error)
  call h5sselect_hyperslab_f(filespace, H5S_SELECT_SET_F, Offset, LocalGridPoints, h5error)

  ! Set dataset transfer properties -------------------------------------------
  call h5pcreate_f(H5P_DATASET_XFER_F, plist_id, h5error)
  call h5pset_dxpl_mpio_f(plist_id, H5FD_MPIO_COLLECTIVE_F, h5error)

  ! Write data ----------------------------------------------------------------
  buffer = C_LOC(Data_out)
  call h5dwrite_f(dset_id, H5T_NATIVE_DOUBLE, buffer, h5error, &
                  file_space_id=filespace, mem_space_id=memspace, xfer_prp=plist_id)

  ! Close hdf5 objects and file -----------------------------------------------
  call h5sclose_f(filespace, h5error)
  call h5sclose_f(memspace, h5error)
  call h5dclose_f(dset_id, h5error)
  call h5pclose_f(plist_id, h5error)
  call h5pclose_f(dsetparams, h5error)
  call h5fclose_f(file_id, h5error)
  call h5close_f(h5error)

  ! Goodbye and finalize ------------------------------------------------------
  Write(*,*)"Rank",rank,"done."
  Call mpi_finalize(mpiError)

end program hdf5_io
To build an executable of the above example, use the programming environment of your choice; in this example we stick to the default one.
#!/bin/bash
ftn -L${HDF5_ROOT}/lib/ -I${HDF5_ROOT}/include -lhdf5 -lhdf5_fortran hdf5_io.f90 -o hdf5_io
The demo can be executed by submitting the following PBS batch script.
#!/bin/bash
#PBS -N Test_HDF5
#PBS -l select=1:node_type=mi300a:mpiprocs=4
#PBS -l walltime=00:01:00
#PBS -q test

cd $PBS_O_WORKDIR

module load cray-hdf5-parallel/1.14.3.1

mpiexec -n 4 --ppn 4 ./hdf5_io
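Assuming the script above is saved as hdf5_test.pbs (the file name is arbitrary and only used for illustration), it can be submitted with:

qsub hdf5_test.pbs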
Four MPI ranks produce a single HDF5 file, with each rank writing a rectangular chunk in x-direction of the contained 2D data field.
The contents of the written file hdf5_io.h5 can be inspected with the h5dump tool:
h5dump hdf5_io.h5
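If only the file layout is of interest, the data values can be skipped. A small sketch using the standard HDF5 command line tools, assuming they are made available by the cray-hdf5-parallel module:

h5dump -H hdf5_io.h5   # print only the header (structure and metadata), no data
h5ls -r hdf5_io.h5     # recursively list the objects contained in the file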
NetCDF
On Hunter, the Cray NetCDF version of the Network Common Data Form (NetCDF) library is installed. The latest release notes can be found at https://cpe.ext.hpe.com/docs/latest/csml/cray_parallel_netcdf.html.
Since Cray NetCDF uses HDF5 as its parallel backend, one has to load the following modules to use the library:
module load cray-hdf5-parallel
module load cray-netcdf-hdf5parallel
Make sure the include and library paths are provided to the compiler and linker. Within the Cray Programming Environment this can be done by passing the -I, -L and -l options on the command line.
For Fortran use:
<compiler_wrapper> -L${NETCDF_DIR}/lib/ -I${NETCDF_DIR}/include -lnetcdf -lnetcdff <your_sources>
Fortran Example
Here we provide a very simple example of the usage of parallel NetCDF in Fortran.
Program netcdf_io

  ! Use mpi and netcdf --------------------------------------------------------
  Use mpi
  Use netcdf

  Implicit None

  ! Init kind parameters ------------------------------------------------------
  Integer, Parameter :: ik=4, rk=8

  ! Some data -----------------------------------------------------------------
  Real(kind=rk), Target :: Data_out(4, 8)

  ! Variables for netcdf ------------------------------------------------------
  Integer(kind=ik) :: ncid
  Integer(kind=ik) :: xDimID
  Integer(kind=ik) :: yDimID
  Integer(kind=ik) :: varID
  Integer(kind=ik) :: DimIDs(2)
  Integer(kind=ik) :: ncError
  Integer(kind=ik) :: Start(2)

  ! Variables for mpi ---------------------------------------------------------
  Integer(kind=ik) :: mpiError,rank

  ! MPI init ------------------------------------------------------------------
  Call mpi_init(mpiError)
  Call mpi_comm_rank(MPI_COMM_WORLD,rank,mpiError)

  ! Tell who's there and init data --------------------------------------------
  Write(*,*)"Rank",rank,"netcdf_io ... "
  data_out = Real(rank)*0.5_rk

  ! Create a netcdf file ------------------------------------------------------
  ncError = nf90_create( "netcdf_io.nc", &
                         Ior(NF90_NETCDF4, NF90_MPIIO), &
                         ncid, comm=MPI_COMM_WORLD, info=MPI_INFO_NULL)

  ! Define global dimensions --------------------------------------------------
  ncError = nf90_def_dim(ncid, "x", Int(16,4), xDimID)
  ncError = nf90_def_dim(ncid, "y", Int( 8,4), yDimID)
  DimIDs  = (/xDimID, yDimID/)

  ! Define a netcdf variable --------------------------------------------------
  ncError = nf90_def_var(ncid, "data", NF90_DOUBLE, DimIDs, varID, &
                         chunksizes=(/4_4,8_4/) )

  ! Finalize netcdf definitions -----------------------------------------------
  ncError = nf90_enddef(ncid)

  ! Set local offset per rank -------------------------------------------------
  Start = (/ rank*4_4 , 0_4 /) + 1_4

  ! Write data ----------------------------------------------------------------
  ncError = nf90_put_var(ncid, varID, Data_out, start=Start, count=(/4_4,8_4/))

  ! Close file ----------------------------------------------------------------
  ncError = nf90_close(ncid)

  ! Goodbye and finalize ------------------------------------------------------
  Write(*,*)"Rank",rank,"done."
  Call mpi_finalize(mpiError)

End Program netcdf_io
To build an executable of the above example, use the programming environment of your choice; in this example we stick to the default one.
#!/bin/bash
ftn -L${NETCDF_DIR}/lib/ -I${NETCDF_DIR}/include -lnetcdf -lnetcdff netcdf_io.f90 -o netcdf_io
The demo can be executed by submitting the following PBS batch script.
#!/bin/bash
#PBS -N Test_NetCDF
#PBS -l select=1:node_type=mi300a:mpiprocs=4
#PBS -l walltime=00:01:00
#PBS -q test

cd $PBS_O_WORKDIR

module load cray-hdf5-parallel/1.14.3.1
module load cray-netcdf-hdf5parallel/4.9.0.13

mpiexec -n 4 --ppn 4 ./netcdf_io

ncdump netcdf_io.nc > netcdf_io.nc.dump
As can be seen below, four MPI ranks produce a single NetCDF file, with each rank writing a rectangular chunk in x-direction of the contained 2D data field.
netcdf netcdf_io {
dimensions:
    x = 16 ;
    y = 8 ;
variables:
    double data(y, x) ;
data:

 data =
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5,
  0, 0, 0, 0, 0.5, 0.5, 0.5, 0.5, 1, 1, 1, 1, 1.5, 1.5, 1.5, 1.5 ;
}
The output shown above can be generated from the written file netcdf_io.nc with the ncdump tool:
ncdump netcdf_io.nc
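To print only the header (dimensions, variables and attributes) without the data section, ncdump accepts the -h option:

ncdump -h netcdf_io.nc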