Commit 78e0fa7e authored by Nicolas Richart

Merge branch 'main' of gitlab.epfl.ch:SCITAS/software-stack/scitas-software-stacks

parents 0df950ce 2c026c13
Showing with 1215 additions and 14 deletions
@@ -29,6 +29,11 @@ variables:
slurm_options: ["-c 36"]
stack: ["pinot-noir"]
app_image: ["registry.c4science.ch/scitas-stack/rhel9-kuma"]
- environment: [kuma_h100]
slurm_options: ["-c 64 -p h100"]
stack: ["pinot-noir-gcc"]
app_image: ["registry.c4science.ch/scitas-stack/rhel9-kuma"]
# - environment: [izar]
# slurm_options: ['-c 40 --gpus 2']
# apptainer_options: ['-nv']
......
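For orientation: the hunk above extends a GitLab CI `parallel: matrix` block, so the new `kuma_h100` row becomes one extra job variant with its own Slurm options, stack name and container image. A minimal sketch of how such a row is consumed, assuming a hypothetical job name and script (the real job definition lies outside this hunk):

build-stack:                       # hypothetical job name; not part of this hunk
  parallel:
    matrix:
      - environment: [kuma_h100]
        slurm_options: ["-c 64 -p h100"]
        stack: ["pinot-noir-gcc"]
        app_image: ["registry.c4science.ch/scitas-stack/rhel9-kuma"]
  script:
    # each matrix row yields one job with these variables set in its environment
    - echo "building $stack for $environment ($slurm_options) using $app_image"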
@@ -6,8 +6,18 @@ FROM registry.c4science.ch/scitas-stack/rhel9-base:9.0
# zlib-devel needed to compile gcc?
# repos for kuma
ARG APPSTREAM_REPO=rhel-9-for-x86_64-appstream-rpms
ARG SLURM_REPO=SCITAS_product-slurm_repo-slurm-2311
ARG PMIX_REPO=SCITAS_product-scitas_repo-scitas-cluster
ARG MLNX_REPO=SCITAS_product-mlnx_repo-mlnx-ofed-24_01-0_3_3_1-5_14_0-70_30_1-rhel90
ARG CUDA_REPO=SCITAS_product-cuda_repo-cuda-535_154_05-x86_64-rhel9
ARG GPFS_REPO=SCITAS_product-gpfs_repo-gpfs-519
RUN sed /etc/yum.repos.d/ubi.repo -i -e 's/enabled = 1/enabled = 0/'
# spack dependencies
RUN yum install -y \
RUN yum install -y --enablerepo=$APPSTREAM_REPO \
gcc-g++ gcc-gfortran \
patchelf findutils patch \
    xz bzip2 file gnupg2 hostname iproute unzip zlib-devel \
@@ -17,20 +27,13 @@ RUN yum install -y \
jq \
&& yum -y clean all && rm -fr /var/cache
# repos for kuma
ARG SLURM_REPO=SCITAS_product-slurm_repo-slurm-2311
ARG PMIX_REPO=SCITAS_product-scitas_repo-scitas-cluster
ARG MLNX_REPO=SCITAS_product-mlnx_repo-mlnx-ofed-24_01-0_3_3_1-5_14_0-70_30_1-rhel90
ARG CUDA_REPO=SCITAS_product-cuda_repo-cuda-535_154_05-x86_64-rhel9
ARG GPFS_REPO=SCITAS_product-gpfs_repo-gpfs-519
# stack dependencies
RUN yum install -y --enablerepo=$SLURM_REPO \
slurm-devel slurm-libpmi \
&& yum -y clean all && rm -fr /var/cache
RUN yum install -y --enablerepo=$PMIX_REPO \
slurm-devel slurm-libpmi \
pmix \
&& yum -y clean all && rm -fr /var/cache
RUN yum install -y --enablerepo=$MLNX_REPO \
......
concretizer:
  reuse: false          # do not reuse already-installed specs; concretize from the current recipes
  unify: false          # concretize each root spec independently
  duplicates:
    strategy: minimal   # only allow duplicate nodes (e.g. build tools) where strictly required
{
"spack": {
"version": "v0.22.0",
"repos": {
"scitas-externals": {
"url": "https://gitlab.epfl.ch/SCITAS/software-stack/spack-repo-externals.git",
"branch": "releases/2024.0.0"
},
"scitas-packages": {
"url": "https://gitlab.epfl.ch/SCITAS/software-stack/scitas-spack-packages.git",
"branch": "releases/2024.0.0"
}
},
"mirrors": {
"local": {
"url": "spack-mirror",
"type": "relative"
},
"restricted": {
"url": "spack-mirror-restriced",
"type": "relative"
}
}
},
"stack": {
"version": "v1",
"mount_point": "/stack",
"system_arch": "target=x86_64_v3",
"system_compiler": {
"gcc@11": {
"compiler": "gcc",
"spec": "gcc@11.2.1",
"version": "11.2.1"
}
},
"targets": {
"helvetios": "skylake_avx512",
"izar": "cascadelake",
"jed": "icelake",
"kuma_l40s": "zen4",
"kuma_h100": "zen4"
},
"compilers": {
"gcc": {
"compiler": "gcc",
"spec": "gcc@12.3.0 %gcc@11.2.1",
"version": "12.3.0"
}
},
"system_packages": [
"autoconf",
"autoconf-archive",
"automake",
"autotools",
"berkley-db",
"bzip2",
"ca-certificates-mozilla",
"check",
"cmake",
"cuda",
"curl",
"diffutils",
"expat",
"findutils",
"flex",
"font-util",
"freetype",
"gawk",
"gdbm",
"gettext",
"git",
"glibc",
"gmake",
"gmp",
"gnuplot",
"hcoll",
"help2man",
"libaec",
"libedit",
"libevent",
"libfuse",
"libidn2",
"libpciaccess",
"libsigsegv",
"libssh2",
"libtiff",
"libtool",
"libunistring",
"libuuid",
"libxml2",
"lz4",
"m4",
"macro-utils",
"meson",
"mpc",
"mpfr",
"ncurses",
"ninja",
"openssl",
"pcre",
"pcre2",
"perl",
"pigz",
"pkgconf",
"pmix",
"rdma-core",
"readline",
"slurm",
"sqlite",
"tar",
"texinfo",
"utf8proc",
"util-linux-uuid",
"xpmem",
"xxd-standalon",
"xz",
"zlib-ng",
"zstd"
]
}
}
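A note on the "compilers" entry above: the spec `gcc@12.3.0 %gcc@11.2.1` asks Spack to build GCC 12.3.0 with the system GCC 11.2.1 and then register the result as a compiler for the rest of the stack. A hedged sketch of the kind of compilers.yaml entry that registration produces, with illustrative paths only (the real prefix under /stack is padded and hashed):

compilers:
- compiler:
    spec: gcc@12.3.0
    paths:
      cc: /stack/path/to/gcc-12.3.0/bin/gcc       # illustrative prefix, not the real hashed path
      cxx: /stack/path/to/gcc-12.3.0/bin/g++
      f77: /stack/path/to/gcc-12.3.0/bin/gfortran
      fc: /stack/path/to/gcc-12.3.0/bin/gfortran
    operating_system: rhel9
    target: x86_64
    modules: []
    flags: {}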
config:
# ccache: true
install_missing_compilers: false
install_tree:
padded_length: 242
build_stage:
- $tempdir/$user/spack-stage
definitions: []
definitions:
- cuda_system_codes:
- cuda
- cudnn
- cuda_serial_codes:
- nccl
definitions:
- cuda_system_codes:
- cuda
- cudnn
- cuda_serial_codes:
- nccl
mirrors: {}
modules:
default:
enable: [lmod]
lmod:
hash_length: 0
core_compilers:
- gcc@11.4.1
hierarchy:
- mpi
all:
environment:
set:
${PACKAGE}_ROOT: ${PREFIX}
suffixes:
'+mpi': mpi
'+openmp': openmp
'threads=openmp': openmp
'^fftw+openmp': openmp
'hdf5=parallel': h5
'+cuda': cuda
'+double-gpu': double-gpu
'+plumed': plumed
'+unwind': unwind
'+debug': dbg
'+ilp64': int64
'+int64': int64
'version_suffix=jl': julia
blacklist_implicits: true
blacklist:
- lmod
gcc:
environment:
set:
CC: ${PREFIX}/bin/gcc
CXX: ${PREFIX}/bin/g++
F77: ${PREFIX}/bin/gfortran
FC: ${PREFIX}/bin/gfortran
F90: ${PREFIX}/bin/gfortran
cuda@12.4.0:
environment:
append_path:
JULIA_LOAD_PATH: ":/ssoft/spack/external/julia/syrah.v1/cuda"
openmpi:
environment:
set:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
append_path:
JULIA_LOAD_PATH: ":/ssoft/spack/external/julia/syrah.v1/mpi"
openblas threads=pthreads:
environment:
set:
OPENBLAS_NUM_THREADS: '1'
hdf5~mpi+cxx:
environment:
set:
HDF5_CC: h5cc
HDF5_CXX: h5c++
HDF5_FC: h5fc
hdf5+mpi~cxx:
environment:
set:
HDF5_CC: h5pcc
HDF5_FC: h5pfc
intel-oneapi-mpi:
environment:
set:
I_MPI_PMI_LIBRARY: /usr/lib64/libpmi2.so
I_MPI_EXTRA_FILESYSTEM: '0'
SLURM_MPI_TYPE: pmi2
julia:
autoload: direct
metis:
suffixes:
~real64: sp
molpro:
template: modules/group_restricted.lua
plumed:
environment:
set:
PLUMED_KERNEL: ${PREFIX}/lib/libplumedKernel.so
unset:
- PLUMED_ROOT
py-horovod:
autoload: direct
py-keras:
autoload: direct
py-tensorflow:
autoload: direct
py-theano:
autoload: direct
py-torch:
autoload: direct
py-torchvision:
autoload: direct
quantum-espresso:
suffixes:
hdf5=parallel: hdf5
scala:
autoload: direct
scons:
suffixes:
'^python@:2.99': py2
'^python@3:': py3
abaqus:
template: modules/group_restricted.lua
adf:
template: modules/group_restricted.lua
ams:
template: modules/group_restricted.lua
comsol:
environment:
prepend_path:
MATLABPATH: ${PREFIX}/mli
crystal17:
template: modules/group_restricted.lua
fdtd:
environment:
prepend_path:
PATH: ${PREFIX}/mpich2/nemesis/bin
LD_LIBRARY_PATH: ${PREFIX}/mpich2/nemesis/lib
set:
SLURM_MPI_TYPE: pmi2
gaussian:
template: modules/group_restricted.lua
spark:
autoload: direct
environment:
prepend_path:
PATH: /ssoft/spack/scripts/all/spark
ucx:
environment:
unset:
- UCX_ROOT
vasp:
template: modules/group_restricted.lua
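For readers skimming the lmod section above: `hash_length: 0` drops the hash from module names, `hierarchy: [mpi]` places MPI-dependent modules behind the corresponding MPI module, and every `suffixes` pattern a spec matches appends its tag to the module name. A hedged illustration of the resulting names, using example versions that are not a statement of what the stack actually ships:

# spec (illustrative versions)       ->  generated module name
# fftw@3.3.10 +mpi +openmp           ->  fftw/3.3.10-mpi-openmp
# py-torch@2.3 +cuda +nccl +cudnn    ->  py-torch/2.3-cuda
# boost@1.84.0 ~mpi                  ->  boost/1.84.0
# The 'all:' rule additionally points the per-package ${PACKAGE}_ROOT variable
# at the installation prefix in every generated module.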
packages:
all:
target: ["skylake_avx512"]
require: ['~cuda', '~rocm']
# py-torch:
# require:
# - spec: '~cuda ~cudnn ~nccl ~rocm'
packages:
all:
target: ["cannonlake"]
require:
- spec: ^openmpi +cuda
when: '%gcc ^mpi'
- spec: cuda_arch=70
when: '+cuda'
- '+cuda'
packages:
all:
target: ["icelake"]
require:
- "~rocm"
- spec: cuda_arch=90 target=icelake
when: '+cuda'
# - spec: '~cuda'
# when: '%oneapi'
- spec: '+cuda'
when: '%gcc'
# ---------------------------------------------------------------------------
hypre:
require:
- spec: '+cuda cuda_arch=90 +unified-memory'
when: '%gcc'
kokkos:
require:
- spec: '+cuda cuda_arch=90 +cuda_uvm +wrapper ~openmptarget'
when: '%gcc'
openmpi:
require:
- spec: '+cuda cuda_arch=90 target=icelake'
petsc:
require:
- spec: '+cuda cuda_arch=90'
when: '%gcc'
py-tensorflow:
require:
- spec: '+cuda cuda_arch=90 +nccl'
py-torch:
require:
- spec: '+cuda cuda_arch=90 +nccl +cudnn'
quantum-espresso:
require:
- spec: '~cuda'
suite-sparse:
require:
- spec: '+cuda'
ucx:
require:
- spec: '+xpmem +cma +rdmacm +rc +ud +dc +verbs +ib_hw_tm +gdrcopy +cuda cuda_arch=90 target=icelake ~rocm'
# ---------------------------------------------------------------------------
# Externals
# ---------------------------------------------------------------------------
pmix:
buildable: false
externals:
- spec: 'pmix@5.0.2'
prefix: /usr
rdma-core:
buildable: false
externals:
- spec: 'rdma-core@47.1'
prefix: /usr
slurm:
buildable: false
externals:
- spec: 'slurm@24-05-0-2'
prefix: /usr
xpmem:
buildable: false
externals:
- spec: 'xpmem@2.7.3'
prefix: /usr
packages:
all:
target: ["zen4"]
require:
- "~rocm"
- spec: cuda_arch=90 target=zen4
when: '+cuda'
- spec: '+cuda'
when: '%gcc'
# ---------------------------------------------------------------------------
hypre:
require:
- spec: '+cuda cuda_arch=90 +unified-memory'
when: '%gcc'
kokkos:
require:
- spec: '+cuda cuda_arch=90 +cuda_uvm +wrapper ~openmptarget'
when: '%gcc'
openmpi:
require:
- spec: '+cuda cuda_arch=90 target=zen4'
petsc:
require:
- spec: '+cuda cuda_arch=90'
when: '%gcc'
py-tensorflow:
require:
- spec: '+cuda cuda_arch=90 +nccl'
py-torch:
require:
- spec: '+cuda cuda_arch=90 +nccl +cudnn'
quantum-espresso:
require:
- spec: '~cuda'
suite-sparse:
require:
- spec: '+cuda'
ucx:
require:
- spec: '+xpmem +cma +rdmacm +rc +ud +dc +verbs +ib_hw_tm +gdrcopy +cuda cuda_arch=90 ~rocm target=zen4'
# ---------------------------------------------------------------------------
# Externals
# ---------------------------------------------------------------------------
pmix:
buildable: false
externals:
- spec: 'pmix@5.0.2'
prefix: /usr
rdma-core:
buildable: false
externals:
- spec: 'rdma-core@47.1'
prefix: /usr
slurm:
buildable: false
externals:
- spec: 'slurm@24-05-0-2'
prefix: /usr
xpmem:
buildable: false
externals:
- spec: 'xpmem@2.7.3'
prefix: /usr
packages:
all:
target: ["zen4"]
require:
- spec: ^openmpi +cuda
when: '%gcc ^mpi'
- spec: cuda_arch=89
when: '+cuda'
- '+cuda'
packages:
all:
providers:
mpi: [openmpi, intel-oneapi-mpi]
iconv: [libiconv]
jpeg: [libjpeg]
zlib-api: [zlib-ng]
require:
- spec: "^libiconv"
when: "^iconv"
# - spec: ^intel-oneapi-mpi
# when: '%oneapi ^mpi'
# - spec: ^openmpi
# when: '%gcc ^mpi'
# - spec: ^openmpi
# when: '%aocc ^mpi'
# - spec: ^nvhpc+mpi
# when: '%nvhpc ^mpi'
# - spec: ^openblas %gcc
# when: '%gcc ^blas'
# - spec: ^eigen %gcc
# when: '%gcc ^eigen'
# - spec: ^kokkos %gcc
# when: '%gcc ^kokkos'
# - spec: ^openblas
# when: '%gcc ^lapack'
# - spec: ^intel-oneapi-mkl
# when: '%oneapi ^blas'
# - spec: ^intel-oneapi-mkl
# when: '%oneapi ^lapack'
# - spec: ^nvhpc+blas
# when: '%nvhpc ^blas'
# - spec: ^nvhpc+lapack
# when: '%nvhpc ^lapack'
# - spec: ^openblas
# when: '%gcc ^py-numpy'
# - spec: ^intel-oneapi-mkl
# when: '%oneapi ^py-numpy'
- spec: '+ipo'
when: '%gcc'
# ----------------------------------------------------------------------------
# Requirements
# ----------------------------------------------------------------------------
adios2:
require: ['+kokkos']
boost:
prefer: ['~mpi']
require: ['cxxstd=14', '+icu', '+python', '+numpy', '+atomic', '+chrono',
'+container', '+date_time', '+filesystem', '+graph', '+iostreams', '~json',
'+locale', '+log', '+math', '~pic', '+program_options', '+random', '+regex',
'+serialization', '+shared', '+signals', '~singlethreaded', '~stacktrace',
'+system', '~taggedlayout', '+test', '+thread', '+timer', '~type_erasure',
'~versionedlayout', '+wave', '+exception']
cuda:
require:
- spec: '@12.4.0'
- spec: '+allow-unsupported-compilers'
when: '%oneapi'
fenics-dolfinx:
require: ['+slepc']
fftw:
prefer: ['+mpi', '+openmp']
gnuplot:
require:
- spec: '@5.4.10'
hdf5:
prefer:
- spec: '+mpi'
- spec: '@1.14.3'
require:
- spec: '+hl +threadsafe +szip +cxx +fortran'
- spec: '+ipo'
when: '%gcc'
hypre:
prefer:
- spec: '@2.31.0 ~magma'
intel-oneapi-mkl:
prefer: ['~cluster']
kokkos:
prefer: ['+openmp', '+openmptarget', '+numactl', '+memkind', '+threads',
'+aggressive_vectorization']
lammps:
require: ['build_type=Release', '+asphere', '+atc', '+body', '+class2',
'+colloid', '+compress', '+coreshell', '+dipole', '+diffraction', '+extra-dump',
'+granular', '+h5md', '+kspace', '+latboltz', '+latte', '+lib', '+manybody',
'+mc', '+misc', '+molecule', '+mpi', '+mpiio', '+netcdf', '+peri', '~poems',
    '+python', '+qeq', '+replica', '+rigid', '+shock', '+ml-snap', '+srd',
'+voronoi', '+plumed', '+kokkos']
libfabric:
require:
- spec: 'fabrics=mlx,mrail,psm3,verbs,udp,tcp,sockets,shm'
llvm:
prefer: ['~clang']
libxcb:
require: ['+use_spack_interpreter']
mesa:
require: ['~llvm', '+opengl', '+opengles', '+osmesa']
metis:
require: ['+real64']
netcdf-c:
prefer: ['+mpi']
openblas:
prefer: ['threads=pthreads symbol_suffix=none ~ilp64']
opencv:
prefer: ['+vtk', '+python3']
openmpi:
require:
- spec: '@5.0.3 fabrics=ofi,ucx,verbs schedulers=slurm ~rsh +romio romio-filesystem=gpfs +internal-pmix ~memchecker'
petsc:
require:
- spec: '+hypre +mumps +saws +scalapack +mpi +suite-sparse +kokkos'
python:
require:
- spec: '+ssl +tkinter'
- spec: '+optimizations'
when: '%gcc'
py-tensorflow:
require: ['+mpi', '+mkl']
py-torch:
require: ['+mpi']
suite-sparse:
require:
- spec: '+graphblas'
tk:
require: ['+xft', '+xss']
ucx:
require:
- spec: +rdmacm +rc +dc +ud +cma +verbs
zlib-ng:
require: ['build_system=autotools']
# ----------------------------------------------------------------------------
# System dependencies
# ----------------------------------------------------------------------------
egl:
buildable: false
externals:
- spec: 'egl@21.3.4'
prefix: /usr
pmix:
buildable: false
externals:
- spec: 'pmix@5.0.1'
prefix: /usr
rdma-core:
buildable: false
externals:
- spec: 'rdma-core@47.1'
prefix: /usr
slurm:
buildable: false
externals:
- spec: 'slurm@23-11-7-1'
prefix: /usr
# ----------------------------------------------------------------------------
# Externals
# ----------------------------------------------------------------------------
abaqus:
buildable: false
permissions: {read: group, group: abaqus-soft}
externals:
- {spec: abaqus@2019, prefix: /ssoft/spack/external/abaqus/2019}
- {spec: abaqus@2023, prefix: /ssoft/spack/external/abaqus/2023}
ansys:
buildable: false
externals:
- {spec: ansys@2020R2, prefix: /ssoft/spack/external/ansys/2020R2/v202}
- {spec: ansys@2022R1, prefix: /ssoft/spack/external/ansys/2022R1/v221}
- {spec: ansys@2022R2, prefix: /ssoft/spack/external/ansys/2022R2/v222}
- {spec: ansys@2024R1, prefix: /ssoft/spack/external/ansys/2024R1/v241}
cfdplusplus:
buildable: false
externals:
- {spec: cfdplusplus@16.1, prefix: /ssoft/spack/external/CFD++/2016.05}
- {spec: cfdplusplus@19.1, prefix: /ssoft/spack/external/CFD++/19.1}
comsol:
buildable: false
permissions: {read: group, group: comsol-soft}
externals:
- {spec: comsol@5.6, prefix: /ssoft/spack/external/comsol/5.6/comsol56/multiphysics/}
- {spec: comsol@6.0, prefix: /ssoft/spack/external/comsol/6.0}
- {spec: comsol@6.2, prefix: /ssoft/spack/external/comsol/6.2}
fdtd:
buildable: false
permissions: {read: group, group: fdtd-soft}
externals:
- {spec: fdtd@2020-R2-2387, prefix: /ssoft/spack/external/fdtd/8.24.2387}
- {spec: fdtd@2020-R2.4-2502, prefix: /ssoft/spack/external/fdtd/2020-R2.4-2502}
- {spec: fdtd@2021-R2.2-2806, prefix: /ssoft/spack/external/fdtd/2021-R2.2-2806}
- {spec: fdtd@2022-R1.1-2963, prefix: /ssoft/spack/external/fdtd/2022-R1.1-2963}
gaussian:
buildable: false
externals:
- {spec: gaussian@g16-A.03, prefix: /ssoft/spack/external/gaussian/g16-A.03/avx2}
- {spec: gaussian@g16-C.01, prefix: /ssoft/spack/external/gaussian/g16-C.01/avx2}
gurobi:
buildable: false
externals:
- {spec: gurobi@8.1.1, prefix: /ssoft/spack/external/gurobi/8.1.1}
- {spec: gurobi@9.5.2, prefix: /ssoft/spack/external/gurobi/9.5.2}
- {spec: gurobi@10.0.1, prefix: /ssoft/spack/external/gurobi/10.0.1}
maple:
buildable: false
externals:
- {spec: maple@2017, prefix: /ssoft/spack/external/Maple/2017}
mathematica:
buildable: false
externals:
- {spec: mathematica@11.1.1, prefix: /ssoft/spack/external/Mathematica/11.1.1}
- {spec: mathematica@13.0, prefix: /ssoft/spack/external/Mathematica/13.0}
matlab:
buildable: false
externals:
- {spec: matlab@R2018a, prefix: /ssoft/spack/external/MATLAB/R2018a}
- {spec: matlab@R2019b, prefix: /ssoft/spack/external/MATLAB/R2019b}
- {spec: matlab@R2024a, prefix: /ssoft/spack/external/MATLAB/R2024a}
molpro:
buildable: false
externals:
- {spec: molpro@2022.3.0, prefix: /ssoft/spack/external/molpro/2022.3.0/mpi}
smr:
buildable: false
externals:
- {spec: smr@2017.0, prefix: /ssoft/spack/external/SMR/2017.06}
totalview:
externals:
- {spec: totalview@2017.2.11, prefix: /ssoft/spack/external/toolworks/totalview.2017.2.11}
- {spec: totalview@2020.3.11, prefix: /ssoft/spack/external/toolworks/totalview.2020.3.11}
spack:
include:
- compiler_definitions.yaml
- packages_stack.yaml
- packages_env.yaml
- modules_stack.yaml
- definitions_env.yaml
- config_stack.yaml
definitions:
# -------------------------------------------------------------------------
# PE definition
# -------------------------------------------------------------------------
- mpi_gcc:
- openmpi@5.0.3
- mpi_oneapi:
- intel-oneapi-mpi@2021.12.1
- blas_gcc:
- openblas@0.3.26
- blas_oneapi:
- intel-oneapi-mkl@2024.1.0
- python_gcc:
- python@3.11.7
- python_oneapi:
- python@3.11.7
# -------------------------------------------------------------------------
# Core packages
# -------------------------------------------------------------------------
- core_codes:
- bzip2
- cmake
- emacs +tls
- fastqc
- fio
- git
- git-lfs
- gmp
- gnuplot
- gzip
- imagemagick +ghostscript
- intel-oneapi-advisor
- intel-oneapi-inspector
- intel-oneapi-vtune
- libarchive
- libjpeg-turbo
- libpng
- libtiff
- libxml2
- lmod
- mpfr
- neovim
- ninja
- parallel
- picard
- rclone
- rust
- sbt
- sratoolkit
- subversion
- tar
- tcl
- tk
- tmux
- trimmomatic
- valgrind
- xclip
- zlib-ng
# -------------------------------------------------------------------------
# Serial codes
# -------------------------------------------------------------------------
- serial_codes:
- bwa
- boost ~mpi
- fftw +openmp ~mpi
- fftw ~openmp ~mpi
- eigen
- gsl
- hdf5@1.14.1 ~mpi
- hisat2
- htslib
- intel-oneapi-tbb
- jasper
- kallisto ^hdf5 ~mpi
- kokkos
- mafft
- metis
- muscle
- netcdf-c~mpi ^hdf5~mpi
- netcdf-fortran ^netcdf-c ~mpi ^hdf5 ~mpi
- nfft ^fftw~mpi~openmp
- python
- scotch ~mpi
- star
- subread
- superlu
- unblur ^fftw~mpi~openmp
- voropp
- gcc_serial_codes:
- cistem ^fftw~mpi
- ctffind ^fftw ~mpi
- ffmpeg +libx264
- glpk+gmp
- hwloc
- intel-oneapi-mkl
- libxc
- ncview ^hdf5 ~mpi ^netcdf-c ~mpi ~parallel-netcdf
- sox
- stacks
- oneapi_serial_codes:
- abaqus@2023
- intel-oneapi-ipp
# -------------------------------------------------------------------------
# Empty to specialize in environments
# -------------------------------------------------------------------------
- cuda_system_codes: []
- cuda_serial_codes: []
# -------------------------------------------------------------------------
# Blas dependent codes
# -------------------------------------------------------------------------
- blas_codes:
- arpack-ng ~mpi
- superlu
- suite-sparse
- gcc_blas_codes:
- armadillo +hdf5 ^arpack-ng ~mpi ^hdf5 ~mpi
- octave
# -------------------------------------------------------------------------
# Python packages
# -------------------------------------------------------------------------
- python_codes:
- bedtools2
- bowtie2
- cairo
- mercurial
- prinseq-lite
- py-absl-py
- py-astunparse
- py-backports-entry-points-selectable
- py-certifi
- py-charset-normalizer
- py-cycler
- py-cython
- py-distlib
- py-filelock
- py-gast
- py-google-pasta
- py-idna
- py-kiwisolver
- py-mpmath
- py-packaging
- py-pillow
- py-pip
- py-platformdirs
- py-ply
- py-protobuf
- py-pybind11
- py-pyparsing
- py-python-dateutil
- py-pytz
- py-requests
- py-semver
- py-six
- py-sympy
- py-termcolor
- py-urllib3
- py-virtualenv
- py-wheel
- py-wrapt
- samtools
- scons
- snakemake
- python_blas_codes:
- boost
- gmsh ~mpi +hdf5 +cgns +eigen ~opencascade +openmp ~fltk ~med
- iq-tree
- py-biopython
- py-keras-preprocessing
- py-macs2
- py-matplotlib
- py-numpy
- py-opt-einsum
- py-pandas
- py-pybigwig
- py-xarray
- gcc_python_codes:
- blast-plus
- caffe +python
- gatk
- gdb +tui +source-highlight +xz
- julia+openlibm
- libgd
- mesa
- mummer
- node-js
- openbabel +python
- r
- spades
- xgboost
- gcc_python_blas_codes:
- polymake ^cddlib@0.94h
- py-cryolobm
- py-deeptools
- py-h5py ~mpi ^hdf5 ~mpi
- py-pymol
- py-scikit-learn
- py-scipy
- py-statsmodels
- py-theano
# -------------------------------------------------------------------------
# Parallel codes
# -------------------------------------------------------------------------
- mpi_codes:
- fftw +mpi +openmp
- hdf5@1.14.3 +mpi
- openfoam-org +metis
- osu-micro-benchmarks +graphing
- parmetis
- phylobayesmpi
- py-mpi4py
- scotch +mpi
- netcdf-c
- netcdf-fortran
- parmetis
- gcc_mpi_codes:
- wrf ~pnetcdf build_type=dm+sm
- mpi_blas_codes:
- arpack-ng +mpi
- hypre@2.31.0
- mumps@5.6.2
- superlu-dist@8.2.1
#- quantum-espresso +mpi +scalapack +gipaw
#- quantum-espresso +mpi +scalapack +gipaw hdf5=parallel
- gcc_mpi_blas_codes:
- gmsh +mpi +eigen +openmp +hdf5 ~fltk ~opencascade ~med ^mmg ~vtk
- cpmd ~openmp
- elmerfem +mumps +openmp +hypre
- netlib-scalapack
- yambo +mpi io=iotk,etsf-io
- mpi_blas_python_codes:
- boost +mpi
- cgal
- iq-tree@1.6.12+mpi
- neuron +mpi +python
- petsc@3.21.1
- plumed
- py-petsc4py
- slepc
- gcc_mpi_blas_python_codes:
# - eman2 +mpi
- adios2
- cp2k +mpi +plumed +openmp smm=blas
# - fenics +parmetis +hdf5 +scotch +suite-sparse ~vtk ~trilinos
- fenics-dolfinx
- gromacs +mpi +plumed
- lammps
- opencv
- openfoam +metis
- paraview +shared +python +hdf5 ~osmesa +opengl2 +egl
- py-fenics-dolfinx
- py-h5py +mpi
- py-horovod
- py-keras
- py-tensorflow
- py-torch
- py-torchvision
- relion ~mklfft
- topaz
- vtk
# -------------------------------------------------------------------------
# External packages
# -------------------------------------------------------------------------
- external_packages: []
- when: env['environment_type'] != 'cloud'
external_packages:
- ansys
- cfdplusplus
- comsol
- fdtd
- gurobi
- gaussian
- matlab
- maple
- mathematica
- molpro
- smr
- when: env['environment_type'] != 'cloud'
blas_mpi_codes:
- vasp +hdf5 +scalapack +shmem +wannier90
# -------------------------------------------------------------------------
# Generic definitions
# -------------------------------------------------------------------------
- mpis:
- $mpi_oneapi
- $mpi_gcc
specs:
- matrix:
- [$compilers_specs]
- matrix:
- [$blas_gcc]
- [$%compiler_gcc]
- matrix:
- [$mpi_gcc]
- [$%compiler_gcc]
- matrix:
- [$core_codes]
- [$%system_compiler]
- matrix:
- [$cuda_system_codes]
- [$%system_compiler]
- matrix:
- [$serial_codes]
- [$%compilers]
- matrix:
- [$gcc_serial_codes]
- [$%compiler_gcc]
- matrix:
- [$cuda_serial_codes]
- [$%compiler_gcc]
- matrix:
- [$blas_codes]
- [$^blas_gcc]
- [$%compiler_gcc]
- matrix:
- [$gcc_blas_codes]
- [$^blas_gcc]
- [$%compiler_gcc]
- matrix:
- [$python_codes]
- [$^python_gcc]
- [$%compiler_gcc]
- matrix:
- [$gcc_python_codes]
- [$^python_gcc]
- [$%compiler_gcc]
- matrix:
- [$python_blas_codes]
- [$^python_gcc]
- [$^blas_gcc]
- [$%compiler_gcc]
- matrix:
- [$gcc_python_blas_codes]
- [$^python_gcc]
- [$^blas_gcc]
- [$%compiler_gcc]
- matrix:
- [$mpi_codes]
- [$^mpi_gcc]
- [$%compiler_gcc]
- matrix:
- [$gcc_mpi_codes]
- [$^mpi_gcc]
- [$%compiler_gcc]
- matrix:
- [$mpi_blas_codes]
- [$^blas_gcc]
- [$^mpi_gcc]
- [$%compiler_gcc]
- matrix:
- [$mpi_blas_python_codes]
- [$^blas_gcc]
- [$^mpi_gcc]
- [$^python_gcc]
- [$%compiler_gcc]
- matrix:
- [$gcc_mpi_blas_python_codes]
- [$^blas_gcc]
- [$^mpi_gcc]
- [$^python_gcc]
- [$%compiler_gcc]
view: false
# default:
# root: /stack/packages/
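To make the spec matrices above easier to read: each `matrix` entry is the cartesian product of its rows, where a bare `$list` pulls in a definition, `$^list` appends dependency constraints and `$%list` appends compiler constraints to every element. A hedged illustration of how the `mpi_codes` matrix expands, assuming the `compiler_gcc` definition (included from one of the files listed above but not shown in this diff) resolves to the stack's gcc@12.3.0; the specs below are illustrative, not generated output:

# - matrix:
#   - [$mpi_codes]
#   - [$^mpi_gcc]
#   - [$%compiler_gcc]
# expands element by element into root specs such as:
specs:
- fftw +mpi +openmp ^openmpi@5.0.3 %gcc@12.3.0
- hdf5@1.14.3 +mpi ^openmpi@5.0.3 %gcc@12.3.0
- py-mpi4py ^openmpi@5.0.3 %gcc@12.3.0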
{% extends "modules/modulefile.lua" %}
{% block footer %}
-- Access is granted only to specific groups, most likely because the software is licensed
if not isDir("{{ spec.prefix }}") then
LmodError (
"You don't have the necessary rights to run \"{{ spec.name }}\".\n\n",
"\tPlease write an e-mail to 1234@epfl.ch if you need further information on how to get access to it.\n"
)
end
{% endblock %}
concretizer:
reuse: false
unify: when_possible
unify: false
duplicates:
strategy: minimal
@@ -29,8 +29,8 @@
"system_compiler": {
"gcc@11": {
"compiler": "gcc",
"spec": "gcc@11.4.1",
"version": "11.4.1"
"spec": "gcc@11.2.1",
"version": "11.2.1"
}
},
"targets": {
@@ -43,12 +43,12 @@
"compilers": {
"oneapi": {
"compiler": "oneapi",
"spec": "intel-oneapi-compilers@2024.1.0 %gcc@11.4.1",
"spec": "intel-oneapi-compilers@2024.1.0 %gcc@11.2.1",
"version": "2024.1.0"
},
"gcc": {
"compiler": "gcc",
"spec": "gcc@12.3.0 %gcc@11.4.1",
"spec": "gcc@12.3.0 %gcc@11.2.1",
"version": "12.3.0"
}
},
@@ -73,6 +73,7 @@
"gawk",
"gdbm",
"gettext",
"git",
"glibc",
"gmake",
"gmp",
@@ -80,13 +81,16 @@
"hcoll",
"help2man",
"libaec",
"libedit",
"libevent",
"libfuse",
"libidn2",
"libpciaccess",
"libsigsegv",
"libssh2",
"libtiff",
"libtool",
"libunistring",
"libuuid",
"libxml2",
"lz4",
@@ -111,6 +115,7 @@
"tar",
"texinfo",
"utf8proc",
"util-linux-uuid",
"xpmem",
"xxd-standalon",
"xz",
......