Commit 9e9fbe73 authored by Oriol.Tinto

Merge branch 'minimal-configuration' into 'master'

Switching to the advanced configuration method and fixing spack build.

See merge request w2w/autosubmit-icon-today!4
parents 9de11980 89aa2520
Showing 1207 additions and 72 deletions
spack:
  init: ""  # command to load the spack environment, e.g. "module load spack"; if empty, spack/setup-env.sh is sourced
  url: https://github.com/spack/spack.git  # url to download spack if necessary
  branch: develop  # if downloaded, branch name to use
  externals: ""  # list of packages we try to find with spack external find
                 # At LMU we need to include slurm in this list.
  compiler: "gcc@12.2.0"  # desired compiler for spack
  root: "%HPCROOTDIR%/spack"  # path to a spack install; spack will be downloaded here if not present
  user_cache_path: "%HPCROOTDIR%/spack_user_cache_path"    # spack puts data here when bootstrapping; leave empty to use the home folder
  user_config_path: "%HPCROOTDIR%/spack_user_config_path"  # spack puts data here when bootstrapping; leave empty to use the home folder
  disable_local_config: false  # if true, spack installs into the spack source dir
icon:
  # The command that will be used to build icon-nwp with spack
  build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER%"
  # At LRZ we used the following command to point to a specific version of openmpi:
  # build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER% ^openmpi/amct7nx"
  # At LMU we used the following command to build a version of openmpi that works with slurm:
  # build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER% ^openmpi+pmi+legacylaunchers schedulers=slurm fabrics=ucx ucx+dc+dm+ib_hw_tm+mlx5_dv+rc+rdmacm+thread_multiple+ud+verbs"
  version: master  # The latest release at the moment of creating this file was 2.6.5-nwp0
python_environment:
  # Name of the virtual environment in the remote platform experiment folder
  folder_name: python_environment
  python_version: "3.8:"  # In spack notation, "x:" means "at least x"
  requirements:
    # Because there's an issue with numba, for now we need to pin a specific version of numpy
    - numpy==1.23
    # Using enstools-compression to compress the outputs.
    - enstools-compression
    # We are using the f90nml library to manipulate namelists.
    - f90nml
data_management:
  # Where do we put the output files afterwards?
  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/

experiment:
  DATELIST: 20201001
  MEMBERS: "fc0"
  CHUNKSIZEUNIT: hour
  CHUNKSIZE: 12
  NUMCHUNKS: 2
  CHUNKINI: 0
  CALENDAR: standard
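For illustration, once Autosubmit substitutes the placeholders above (with the defaults in this file, ICON.VERSION=master and SPACK.COMPILER=gcc@12.2.0), the default build_cmd renders to a plain spack spec, which the build job passes to spack (see build_icon.sh further down). A minimal sketch of the equivalent manual invocation, assuming those default values:

    # Sketch only: build_icon.sh runs the equivalent of this, with the spec
    # taken from %ICON.BUILD_CMD% after placeholder substitution.
    spack spec icon-nwp@master %gcc@12.2.0       # resolve and show the concretized spec
    spack install --reuse icon-nwp@master %gcc@12.2.0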
UserInformation:
  LMU:
    user: FILL_USER
    host: FILL_HOST
    project: FILL_PROJECT
    scratch: FILL_SCRATCH
  LRZ:
    user: FILL_USER
    host: FILL_HOST
    project: FILL_PROJECT
    scratch: FILL_SCRATCH
Platforms:
  LMU:
    TYPE: slurm
    HOST: %UserInformation.LMU.host%
    PROJECT: %UserInformation.LMU.project%
    USER: %UserInformation.LMU.user%
    SCRATCH_DIR: %UserInformation.LMU.scratch%
    ADD_PROJECT_TO_HOST: False
    MAX_WALLCLOCK: '48:00'
    TEMP_DIR: ''
    CUSTOM_DIRECTIVES: "#SBATCH --export=ALL,OMPI_MCA_btl_tcp_if_include=10.0.0.0/8"
  LMU_LOGIN:
    TYPE: ps
    HOST: %UserInformation.LMU.host%
    PROJECT: %UserInformation.LMU.project%
    USER: %UserInformation.LMU.user%
    SCRATCH_DIR: %UserInformation.LMU.scratch%
    ADD_PROJECT_TO_HOST: False
    MAX_WALLCLOCK: '48:00'
    TEMP_DIR: ''
  LRZ:
    TYPE: slurm
    HOST: %UserInformation.LRZ.host%
    PROJECT: %UserInformation.LRZ.project%
    USER: %UserInformation.LRZ.user%
    SCRATCH_DIR: %UserInformation.LRZ.scratch%
    ADD_PROJECT_TO_HOST: False
    MAX_WALLCLOCK: '48:00'
    TEMP_DIR: ''
  LRZ_LOGIN:
    TYPE: ps
    HOST: %UserInformation.LRZ.host%
    PROJECT: %UserInformation.LRZ.project%
    USER: %UserInformation.LRZ.user%
    SCRATCH_DIR: %UserInformation.LRZ.scratch%
    ADD_PROJECT_TO_HOST: False
    MAX_WALLCLOCK: '48:00'
    TEMP_DIR: ''
# Example job with all options specified
JOBS:
  ## Job name
  # JOBNAME:
  ## Script to execute. If not specified, the job will be omitted from the workflow. You can also specify additional files, separated by a ",".
  # Note: The post-processed additional_files will be sent to %HPCROOT%/LOG_%EXPID%
  ## Path relative to the project directory
  # FILE:
  ## Platform to execute the job. If not specified, defaults to HPCARCH in the expdef file.
  ## LOCAL is always defined and refers to the current machine
  # PLATFORM:
  ## Queue to add the job to. If not specified, uses the PLATFORM default.
  # QUEUE:
  ## Defines dependencies of the job as a list of parent jobs separated by spaces.
  ## For dependencies on jobs in a previous chunk, member or startdate, use -(DISTANCE)
  # DEPENDENCIES: INI SIM-1 CLEAN-2
  ## Defines if the job runs once, once per startdate, once per member or once per chunk. Options: once, date, member, chunk.
  ## If not specified, defaults to once
  # RUNNING: once
  ## Specifies that the job only has to be run every X dates, members or chunks. A job will always be created for the last one.
  ## If not specified, defaults to 1
  # FREQUENCY: 3
  ## On a job with FREQUENCY > 1, if True, the dependencies are evaluated against all
  ## jobs in the frequency interval; otherwise dependencies are only evaluated against the current
  ## iteration.
  ## If not specified, defaults to True
  # WAIT: False
  ## Defines if the job is only to be executed in reruns. If not specified, defaults to false.
  # RERUN_ONLY: False
  ## Wallclock to be submitted to the HPC queue, in format HH:MM
  # WALLCLOCK: 00:05
  ## Wallclock chunk increase (WALLCLOCK will be increased according to the formula WALLCLOCK + WCHUNKINC * (chunk - 1)).
  ## Ideal for sequences of jobs whose expected running time changes with the current chunk.
  # WCHUNKINC: 00:01
  ## Number of processors to be submitted to the HPC. If not specified, defaults to 1.
  # PROCESSORS: 1
  ## Number of threads to be submitted to the HPC. If not specified, defaults to 1.
  # THREADS: 1
  ## Enables hyper-threading. If not specified, defaults to false.
  # HYPERTHREADING: false
  ## Number of tasks to be submitted to the HPC. If not specified, defaults to 1.
  # TASKS: 1
  ## Memory requirements for the job in MB
  # MEMORY: 4096
  ## Number of retrials if a job fails. If not specified, defaults to the value given in the experiment's autosubmit.yml
  # RETRIALS: 4
  ## Allows putting a delay between retrials if a job fails. If not specified, the delay is static
  # DELAY_RETRY_TIME: 11
  # DELAY_RETRY_TIME: +11  # will wait 11, 22, 33, 44...
  # DELAY_RETRY_TIME: *11  # will wait 11, 110, 1110, 11110...
  ## Some jobs cannot be checked before running the previous jobs. Set this option to false if that is the case.
  # CHECK: False
  ## Select the interpreter that will run the job. Options: bash, python, r. Default: bash
  # TYPE: bash
  ## Specify the path to the interpreter. If empty, use the system default based on the job type. Default: empty
  # EXECUTABLE: /my_python_env/python3
  TRANSFER_PROJECT:
    FILE: templates/common/transfer_project.sh
    PLATFORM: LOCAL
@@ -65,6 +8,8 @@ JOBS:
    DEPENDENCIES: TRANSFER_PROJECT
    WALLCLOCK: 04:00
    PROCESSORS: 16
    RETRIALS: 2  # retry because spack downloads sometimes timeout
    NODES: 1
  BUILD_PYTHON_ENVIRONMENT:
    FILE: templates/common/build_python_environment.sh
@@ -72,6 +17,7 @@ JOBS:
    DEPENDENCIES: BUILD_ICON
    WALLCLOCK: 01:00
    PROCESSORS: 16
    NODES: 1
  PREPARE_EXPERIMENT:
    FILE: templates/real-from-dwd-ana/prepare_experiment.sh
@@ -79,34 +25,42 @@ JOBS:
    RUNNING: once
    WALLCLOCK: 01:00
  PREPARE_DATE_LOCAL:
    FILE: templates/real-from-dwd-ana/prepare_date_local.sh
    RUNNING: date
    WALLCLOCK: 01:00
    PLATFORM: LOCAL
  PREPARE_DATE_REMOTE:
    FILE: templates/real-from-dwd-ana/prepare_date_remote.sh
    RUNNING: date
    WALLCLOCK: 01:00
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  PREPARE_MEMBER:
    FILE: templates/real-from-dwd-ana/prepare_member.sh
    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE_REMOTE PREPARE_DATE_LOCAL
    RUNNING: member
    WALLCLOCK: 01:00
  PREPARE_NAMELIST:
    FILE: templates/real-from-dwd-ana/prepare_namelist.py
    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
    WALLCLOCK: 00:05
    RUNNING: chunk
    TYPE: python
    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  RUN_ICON:
    FILE: templates/common/run_icon.sh
    DEPENDENCIES: PREPARE_NAMELIST
    WALLCLOCK: 04:00
    RUNNING: chunk
    PROCESSORS: 64
    MEMORY: 81920
    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive" ]
  COMPRESS:
    FILE: templates/common/compress.py
@@ -130,4 +84,5 @@ JOBS:
    FILE: templates/common/clean.sh
    DEPENDENCIES: TRANSFER
    WALLCLOCK: 00:10
    RUNNING: member
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
spack:
  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
  branch: lmu/ubuntu20.04-icon
  compiler: gcc@11.3.0

icon:
  version: 2.6.5-nwp0

python_environment:
  # Name of the virtual environment in the remote platform experiment folder
  folder_name: python_environment
  requirements:
    # Because there's an issue with numba, for now we need to pin a specific version of numpy
    - numpy==1.23
    - enstools-compression
    # Just to try a library from a git repository.
    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
    - f90nml
simulation:
  dynamics_grid_filename: icon_grid_0016_R02B06_G.nc
  radiation_grid_filename: icon_grid_0015_R02B05_R.nc
@@ -31,12 +12,9 @@ simulation:
  output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
  files_to_clean: "*.nc"

initial_conditions:
  # Where are we getting our initial data from?
  local: true
  parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
  member: 1
# Example job with all options specified
JOBS:
  ## Job name
  # JOBNAME:
  ## Script to execute. If not specified, the job will be omitted from the workflow. You can also specify additional files, separated by a ",".
  # Note: The post-processed additional_files will be sent to %HPCROOT%/LOG_%EXPID%
  ## Path relative to the project directory
  # FILE:
  ## Platform to execute the job. If not specified, defaults to HPCARCH in the expdef file.
  ## LOCAL is always defined and refers to the current machine
  # PLATFORM:
  ## Queue to add the job to. If not specified, uses the PLATFORM default.
  # QUEUE:
  ## Defines dependencies of the job as a list of parent jobs separated by spaces.
  ## For dependencies on jobs in a previous chunk, member or startdate, use -(DISTANCE)
  # DEPENDENCIES: INI SIM-1 CLEAN-2
  ## Defines if the job runs once, once per startdate, once per member or once per chunk. Options: once, date, member, chunk.
  ## If not specified, defaults to once
  # RUNNING: once
  ## Specifies that the job only has to be run every X dates, members or chunks. A job will always be created for the last one.
  ## If not specified, defaults to 1
  # FREQUENCY: 3
  ## On a job with FREQUENCY > 1, if True, the dependencies are evaluated against all
  ## jobs in the frequency interval; otherwise dependencies are only evaluated against the current
  ## iteration.
  ## If not specified, defaults to True
  # WAIT: False
  ## Defines if the job is only to be executed in reruns. If not specified, defaults to false.
  # RERUN_ONLY: False
  ## Wallclock to be submitted to the HPC queue, in format HH:MM
  # WALLCLOCK: 00:05
  ## Wallclock chunk increase (WALLCLOCK will be increased according to the formula WALLCLOCK + WCHUNKINC * (chunk - 1)).
  ## Ideal for sequences of jobs whose expected running time changes with the current chunk.
  # WCHUNKINC: 00:01
  ## Number of processors to be submitted to the HPC. If not specified, defaults to 1.
  # PROCESSORS: 1
  ## Number of threads to be submitted to the HPC. If not specified, defaults to 1.
  # THREADS: 1
  ## Enables hyper-threading. If not specified, defaults to false.
  # HYPERTHREADING: false
  ## Number of tasks to be submitted to the HPC. If not specified, defaults to 1.
  # TASKS: 1
  ## Memory requirements for the job in MB
  # MEMORY: 4096
  ## Number of retrials if a job fails. If not specified, defaults to the value given in the experiment's autosubmit.yml
  # RETRIALS: 4
  ## Allows putting a delay between retrials if a job fails. If not specified, the delay is static
  # DELAY_RETRY_TIME: 11
  # DELAY_RETRY_TIME: +11  # will wait 11, 22, 33, 44...
  # DELAY_RETRY_TIME: *11  # will wait 11, 110, 1110, 11110...
  ## Some jobs cannot be checked before running the previous jobs. Set this option to false if that is the case.
  # CHECK: False
  ## Select the interpreter that will run the job. Options: bash, python, r. Default: bash
  # TYPE: bash
  ## Specify the path to the interpreter. If empty, use the system default based on the job type. Default: empty
  # EXECUTABLE: /my_python_env/python3
  TRANSFER_PROJECT:
    FILE: templates/common/transfer_project.sh
    PLATFORM: LOCAL
@@ -62,8 +6,10 @@ JOBS:
  BUILD_ICON:
    FILE: templates/common/build_icon.sh
    DEPENDENCIES: TRANSFER_PROJECT
    WALLCLOCK: 04:00
    PROCESSORS: 16
    RETRIALS: 2  # retry because spack downloads sometimes timeout
    NODES: 1
  BUILD_PYTHON_ENVIRONMENT:
    FILE: templates/common/build_python_environment.sh
@@ -71,20 +17,21 @@ JOBS:
    DEPENDENCIES: BUILD_ICON
    WALLCLOCK: 01:00
    PROCESSORS: 16
    NODES: 1
  PREPARE_EXPERIMENT:
    FILE: templates/real-from-ideal/prepare_experiment.sh
    DEPENDENCIES: BUILD_ICON
    RUNNING: once
    WALLCLOCK: 00:10
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  PREPARE_IDEAL_DIRECTORY:
    FILE: templates/real-from-ideal/prepare_ideal_directory.sh
    DEPENDENCIES: PREPARE_EXPERIMENT
    RUNNING: date
    WALLCLOCK: 00:10
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  PREPARE_IDEAL_NAMELIST:
    FILE: templates/real-from-ideal/prepare_ideal_namelist.py
@@ -93,7 +40,7 @@ JOBS:
    WALLCLOCK: 00:10
    TYPE: python
    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  RUN_IDEAL:
    FILE: templates/real-from-ideal/run_ideal.sh
@@ -123,23 +70,24 @@ JOBS:
    DEPENDENCIES: FG_ANA_FROM_IDEALIZED EXTPAR_FROM_IDEALIZED
    RUNNING: member
    WALLCLOCK: 01:00
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  PREPARE_NAMELIST:
    FILE: templates/real-from-ideal/prepare_namelist.py
    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
    WALLCLOCK: 00:05
    RUNNING: chunk
    TYPE: python
    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
  RUN_ICON:
    FILE: templates/common/run_icon.sh
    DEPENDENCIES: PREPARE_NAMELIST
    WALLCLOCK: 01:00
    RUNNING: chunk
    PROCESSORS: 16
    NODES: 1
  COMPRESS:
    FILE: templates/common/compress.py
@@ -160,4 +108,4 @@ JOBS:
    DEPENDENCIES: TRANSFER
    WALLCLOCK: 00:10
    RUNNING: member
    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
spack:
  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
  branch: lmu/ubuntu20.04-icon
  compiler: gcc@11.3.0

icon:
  version: 2.6.5-nwp0

python_environment:
  # Name of the virtual environment in the remote platform experiment folder
  folder_name: python_environment
  requirements:
    # Because there's an issue with numba, for now we need to pin a specific version of numpy
    - numpy==1.23
    - enstools-compression
    # Just to try a library from a git repository.
    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
    - f90nml
simulation:
  dynamics_grid_filename: icon_grid_0012_R02B04_G.nc
  radiation_grid_filename: icon_grid_0011_R02B03_R.nc
@@ -32,8 +13,4 @@ simulation:
  # List of output file names that will be copied (Wildcards * allowed)
  output_file_names: "latbc_DOM01_ML_*.nc"
  files_to_clean: "*.nc"

data_management:
  # Where do we put the output files afterwards?
  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
function _install_spack() {
  if [ ! -e ${SPACK_ROOT} ]; then
    echo "Cloning to ${SPACK_ROOT}"
    git clone --depth 1 ${SPACK_URL} -b ${SPACK_BRANCH} ${SPACK_ROOT}
  fi

  _init_spack "$SPACK_INIT_CMD" "$SPACK_ROOT"

  if [ ! -z "${SPACK_EXTERNALS}" ] ; then
    for ext in ${SPACK_EXTERNALS}; do
      spack external find $ext
    done
  fi

  if [[ $(spack compiler info ${SPACK_COMPILER}) ]]; then
    echo "Found Compiler $(spack compiler info ${SPACK_COMPILER})"
  else
    echo "could not find compiler, will now try to install it... this may take a while"
    spack install --reuse ${SPACK_COMPILER}
    spack compiler add $(spack location --install-dir $SPACK_COMPILER)
  fi
}

function _init_spack() {
  SPACK_INIT_CMD=$1
  SPACK_ROOT=$2
  if [ -z "$SPACK_INIT_CMD" ] && [ ! -z "${SPACK_ROOT}" ]; then
    echo "Empty SPACK_INIT_CMD -> trying to source config file of spack root: $SPACK_ROOT/share/spack/setup-env.sh"
    . $SPACK_ROOT/share/spack/setup-env.sh
  else
    echo "Executing SPACK_INIT_CMD: $SPACK_INIT_CMD"
    $SPACK_INIT_CMD
  fi
}

function load_spack() {
  export SPACK_INIT_CMD=$1
  export SPACK_ROOT=$2                  # i.e.: spack
  export SPACK_URL=$3                   # i.e.: https://github.com/spack/spack.git
  export SPACK_BRANCH=$4                # i.e.: develop
  export SPACK_EXTERNALS=$5             # i.e.: slurm
  export SPACK_COMPILER=$6              # i.e.: gcc@12.2.0
  export SPACK_DISABLE_LOCAL_CONFIG=$7  # i.e.: true
  export SPACK_USER_CACHE_PATH=$8       # i.e.: ${SPACK_ROOT}/spack_user_cache_path
  export SPACK_USER_CONFIG_PATH=$9      # i.e.: ${SPACK_ROOT}/spack_user_config_path

  if [ "$SPACK_DISABLE_LOCAL_CONFIG" != "True" ]; then
    unset SPACK_DISABLE_LOCAL_CONFIG
  fi

  echo "SPACK_INIT_CMD = $SPACK_INIT_CMD"
  echo "SPACK_ROOT = $SPACK_ROOT"
  echo "SPACK_URL = $SPACK_URL"
  echo "SPACK_BRANCH = $SPACK_BRANCH"
  echo "SPACK_EXTERNALS = $SPACK_EXTERNALS"
  echo "SPACK_COMPILER = $SPACK_COMPILER"
  echo "SPACK_DISABLE_LOCAL_CONFIG = ${SPACK_DISABLE_LOCAL_CONFIG:-False}"
  echo "SPACK_USER_CACHE_PATH = $SPACK_USER_CACHE_PATH"
  echo "SPACK_USER_CONFIG_PATH = $SPACK_USER_CONFIG_PATH"

  if [ -z "$SPACK_USER_CACHE_PATH" ]; then
    unset SPACK_USER_CACHE_PATH
  fi
  if [ ! -z "${SPACK_ROOT}" ] ; then _install_spack; fi
  if [ -z "$SPACK_USER_CONFIG_PATH" ]; then
    unset SPACK_USER_CONFIG_PATH
  fi

  _init_spack "$SPACK_INIT_CMD" "$SPACK_ROOT"
  echo "Using spack from $(which spack)"
}
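For reference, the build scripts later in this diff call load_spack with nine positional arguments taken from the spack: section of the configuration. A hedged sketch of a manual invocation, with literal values standing in for the %spack.*% placeholders:

    # Illustrative values only, mirroring the defaults of the spack: config section.
    . proj/platforms/common/spack_utils.sh
    load_spack "" \
      "$HOME/spack" \
      "https://github.com/spack/spack.git" \
      "develop" \
      "slurm" \
      "gcc@12.2.0" \
      "false" \
      "$HOME/spack_user_cache_path" \
      "$HOME/spack_user_config_path"

An empty first argument makes _init_spack fall back to sourcing $SPACK_ROOT/share/spack/setup-env.sh, as the function above shows.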
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

# ----------------------------------------------------------------------------
# If you submit this package back to Spack as a pull request,
# please first remove this boilerplate and all FIXME comments.
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
# them, you can save this file and test your package like this:
#
#     spack install dwd-icon-tools
#
# You can edit this file again by typing:
#
#     spack edit dwd-icon-tools
#
# See the Spack documentation for more information on packaging.
# ----------------------------------------------------------------------------

from pathlib import Path

from spack.package import *


class DwdIconTools(Package):
    """DWD Icon Tools"""

    homepage = "https://www.example.com"
    # maintainers("oriol.tinto")

    git = "ssh://git@gitlab.lrz.de/dkrz-mirror/dwd_icon_tools.git"

    version("2.5.2", branch="icontools-2.5.2")

    depends_on("netcdf-c")
    depends_on("eccodes")

    def patch(self):
        """
        Because of the lack of access rights to the original submodule repositories,
        we patch the .gitmodules file to point to a different mirror.
        """
        git_submodules_file = Path().cwd() / ".gitmodules"
        git_mirror = "git@gitlab.lrz.de:dkrz-mirror"
        git_modules_patch = f"""
[submodule "externals/libcdi"]
    path = externals/libcdi
    url = {git_mirror}/libcdi.git
"""
        # Replace the content of the original file with the patch
        with git_submodules_file.open("w") as out_f:
            out_f.write(git_modules_patch)

        # Run git submodule update
        git = which("git")
        git("submodule", "update", "--init", "--recursive")

    def setup_build_environment(self, env):
        spec = self.spec
        # Some environment variables to set
        env_variables_to_set = {
            # "CC": spec["mpi"].mpicc,
            # "FC": spec["mpi"].mpifc,
            # "F77": spec["mpi"].mpif77,
            "CXXFLAGS": "-O2 -g -fopenmp -Wunused -DNOMPI",
            "FCFLAGS": "-I/usr/include --std=f2008 -O2 -g -cpp -fopenmp -fbounds-check -Wunused -DNOMPI",
            "LIBS": "-leccodes -lgfortran -lhdf5 -lxml2",
        }
        for variable, value in env_variables_to_set.items():
            env.set(variable, value)

    def install(self, spec, prefix):
        options = [
            f"--prefix={prefix}",
            "--enable-grib2",
            "--enable-iso-c-interface",
            f"--with-netcdf={spec['netcdf-c'].prefix}",
        ]
        configure(*options)
        make()
        make("install")
--- a/gettext-runtime/gnulib-lib/xalloc-oversized.h 2020-08-21 07:51:29.459375578 -0700
+++ b/gettext-runtime/gnulib-lib/xalloc-oversized.h 2020-08-21 07:53:18.571795663 -0700
@@ -41,10 +41,10 @@
positive and N must be nonnegative. This is a macro, not a
function, so that it works correctly even when SIZE_MAX < N. */
-#if 7 <= __GNUC__
+#if 7 <= __GNUC__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
__builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
+#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
(__builtin_constant_p (n) && __builtin_constant_p (s) \
? __xalloc_oversized (n, s) \
--- a/gettext-runtime/gnulib-lib/intprops.h 2020-08-21 07:51:20.668341900 -0700
+++ b/gettext-runtime/gnulib-lib/intprops.h 2020-08-21 07:52:43.906661856 -0700
@@ -222,7 +222,7 @@
/* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
(A, B, P) work when P is non-null. */
-#if 5 <= __GNUC__ && !defined __ICC
+#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
-#elif defined __has_builtin
+#elif defined __has_builtin && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
@@ -240,7 +240,7 @@
/* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
__builtin_mul_overflow_p and __builtin_mul_overflow_p. */
-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
+#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
/* The _GL*_OVERFLOW macros have the same restrictions as the
*_RANGE_OVERFLOW macros, except that they do not assume that operands
--- a/gettext-tools/gnulib-lib/xalloc-oversized.h 2020-08-21 10:19:23.875281647 -0700
+++ b/gettext-tools/gnulib-lib/xalloc-oversized.h 2020-08-21 10:20:40.650583499 -0700
@@ -41,10 +41,10 @@
positive and N must be nonnegative. This is a macro, not a
function, so that it works correctly even when SIZE_MAX < N. */
-#if 7 <= __GNUC__
+#if 7 <= __GNUC__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
__builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
+#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
(__builtin_constant_p (n) && __builtin_constant_p (s) \
? __xalloc_oversized (n, s) \
--- a/gettext-tools/gnulib-lib/intprops.h 2020-08-21 10:18:38.650103825 -0700
+++ b/gettext-tools/gnulib-lib/intprops.h 2020-08-21 10:19:12.379236445 -0700
@@ -222,7 +222,7 @@
/* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
(A, B, P) work when P is non-null. */
-#if 5 <= __GNUC__ && !defined __ICC
+#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
-#elif defined __has_builtin
+#elif defined __has_builtin && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
@@ -240,7 +240,7 @@
/* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
__builtin_mul_overflow_p and __builtin_mul_overflow_p. */
-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
+#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
/* The _GL*_OVERFLOW macros have the same restrictions as the
*_RANGE_OVERFLOW macros, except that they do not assume that operands
--- a/gettext-tools/libgrep/intprops.h 2020-08-21 10:31:00.726022663 -0700
+++ b/gettext-tools/libgrep/intprops.h 2020-08-21 10:31:29.946137693 -0700
@@ -222,7 +222,7 @@
/* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
(A, B, P) work when P is non-null. */
-#if 5 <= __GNUC__ && !defined __ICC
+#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
-#elif defined __has_builtin
+#elif defined __has_builtin && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
@@ -240,7 +240,7 @@
/* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
__builtin_mul_overflow_p and __builtin_mul_overflow_p. */
-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
+#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
/* The _GL*_OVERFLOW macros have the same restrictions as the
*_RANGE_OVERFLOW macros, except that they do not assume that operands
--- a/gettext-tools/libgettextpo/xalloc-oversized.h 2020-08-21 11:19:50.065564273 -0700
+++ b/gettext-tools/libgettextpo/xalloc-oversized.h 2020-08-21 11:21:14.732898185 -0700
@@ -41,10 +41,10 @@
positive and N must be nonnegative. This is a macro, not a
function, so that it works correctly even when SIZE_MAX < N. */
-#if 7 <= __GNUC__
+#if 7 <= __GNUC__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
__builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
+#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
(__builtin_constant_p (n) && __builtin_constant_p (s) \
? __xalloc_oversized (n, s) \
--- a/gettext-tools/libgettextpo/intprops.h 2020-08-21 11:19:58.703598336 -0700
+++ b/gettext-tools/libgettextpo/intprops.h 2020-08-21 11:20:37.612751786 -0700
@@ -222,7 +222,7 @@
/* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
(A, B, P) work when P is non-null. */
-#if 5 <= __GNUC__ && !defined __ICC
+#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
-#elif defined __has_builtin
+#elif defined __has_builtin && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
@@ -240,7 +240,7 @@
/* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
__builtin_mul_overflow_p and __builtin_mul_overflow_p. */
-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
+#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
/* The _GL*_OVERFLOW macros have the same restrictions as the
*_RANGE_OVERFLOW macros, except that they do not assume that operands
--- a/libtextstyle/lib/xalloc-oversized.h 2020-08-21 11:30:13.488022919 -0700
+++ b/libtextstyle/lib/xalloc-oversized.h 2020-08-21 11:31:26.561311097 -0700
@@ -41,10 +41,10 @@
positive and N must be nonnegative. This is a macro, not a
function, so that it works correctly even when SIZE_MAX < N. */
-#if 7 <= __GNUC__
+#if 7 <= __GNUC__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
__builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
+#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
# define xalloc_oversized(n, s) \
(__builtin_constant_p (n) && __builtin_constant_p (s) \
? __xalloc_oversized (n, s) \
--- a/libtextstyle/lib/intprops.h 2020-08-21 11:30:24.283065492 -0700
+++ b/libtextstyle/lib/intprops.h 2020-08-21 11:30:54.415184325 -0700
@@ -222,7 +222,7 @@
/* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
(A, B, P) work when P is non-null. */
-#if 5 <= __GNUC__ && !defined __ICC
+#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
-#elif defined __has_builtin
+#elif defined __has_builtin && !defined __NVCOMPILER
# define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
@@ -240,7 +240,7 @@
/* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
__builtin_mul_overflow_p and __builtin_mul_overflow_p. */
-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
+#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
/* The _GL*_OVERFLOW macros have the same restrictions as the
*_RANGE_OVERFLOW macros, except that they do not assume that operands
--- a/gettext-runtime/intl/Makefile.in 2020-08-21 08:39:59.102729081 -0700
+++ b/gettext-runtime/intl/Makefile.in 2020-08-21 08:40:07.425761760 -0700
@@ -1471,7 +1471,6 @@
OTHER_LDFLAGS = \
@LTLIBICONV@ @INTL_MACOSX_LIBS@ $(INTL_WINDOWS_LIBS) @LTLIBTHREAD@ \
-no-undefined \
- -export-symbols-regex '^([^g]|g[^l]|gl[^w]|glw[^t]|glwt[^h]|glwth[^r]|glwthr[^e]|glwthre[^a]|glwthrea[^d]).*' \
-version-info $(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) \
-rpath $(libdir)
--- a/gettext-tools/intl/Makefile.in 2020-08-21 07:57:18.357721212 -0700
+++ b/gettext-tools/intl/Makefile.in 2020-08-21 07:57:29.051762490 -0700
@@ -2296,7 +2296,6 @@
OTHER_LDFLAGS = \
@LTLIBICONV@ @INTL_MACOSX_LIBS@ $(INTL_WINDOWS_LIBS) @LTLIBTHREAD@ \
-no-undefined \
- -export-symbols-regex '^([^g]|g[^l]|gl[^w]|glw[^t]|glwt[^h]|glwth[^r]|glwthr[^e]|glwthre[^a]|glwthrea[^d]).*' \
-version-info $(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) \
-rpath $(libdir)
--- a/libtextstyle/lib/Makefile.in 2020-08-21 08:49:08.277982271 -0700
+++ b/libtextstyle/lib/Makefile.in 2020-08-21 08:49:19.675030561 -0700
@@ -1917,7 +1917,7 @@
libtextstyle_la_LDFLAGS = $(AM_LDFLAGS) -no-undefined $(FABS_LIBM) \
$(ISNAND_LIBM) $(ISNANF_LIBM) $(ISNANL_LIBM) $(LOG10_LIBM) \
$(LTLIBICONV) $(LTLIBINTL) $(POW_LIBM) $(am__append_8) \
- -no-undefined -export-symbols libtextstyle.sym -version-info \
+ -no-undefined -version-info \
$(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) -rpath $(libdir)
# Use this preprocessor expression to decide whether #include_next works.
--- a/gettext-tools/libgrep/regex_internal.h 2020-08-21 09:14:20.039942370 -0700
+++ b/gettext-tools/libgrep/regex_internal.h 2020-08-21 10:06:57.840331452 -0700
@@ -35,6 +35,14 @@
#include <intprops.h>
#include <verify.h>
+#ifndef __LONG_WIDTH__
+#if LONG_WIDTH
+#define __LONG_WIDTH__ LONG_WIDTH
+#else
+#define __LONG_WIDTH__ __WORDSIZE
+#endif
+#endif
+
#if defined DEBUG && DEBUG != 0
# include <assert.h>
# define DEBUG_ASSERT(x) assert (x)
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import re

from spack.package import *


class Gettext(AutotoolsPackage, GNUMirrorPackage):
    """GNU internationalization (i18n) and localization (l10n) library."""

    homepage = "https://www.gnu.org/software/gettext/"
    gnu_mirror_path = "gettext/gettext-0.20.1.tar.xz"

    maintainers("michaelkuhn")

    executables = [r"^gettext$"]

    # buggy install when calling msgfmt: version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6")
    # buggy install when calling msgfmt: version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192")
    version("0.20.2", sha256="b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba")
    version("0.20.1", sha256="53f02fbbec9e798b0faaf7c73272f83608e835c6288dd58be6c9bb54624a3800")
    version("0.19.8.1", sha256="105556dbc5c3fbbc2aa0edb46d22d055748b6f5c7cd7a8d99f8e7eb84e938be4")
    version("0.19.7", sha256="378fa86a091cec3acdece3c961bb8d8c0689906287809a8daa79dc0c6398d934")

    # Recommended variants
    variant("curses", default=True, description="Use libncurses")
    variant("libxml2", default=True, description="Use libxml2")
    variant("git", default=True, description="Enable git support")
    variant("tar", default=True, description="Enable tar support")
    variant("bzip2", default=True, description="Enable bzip2 support")
    variant("xz", default=True, description="Enable xz support")

    # Optional variants
    variant("libunistring", default=False, description="Use libunistring")

    depends_on("iconv")
    # Recommended dependencies
    depends_on("ncurses", when="+curses")
    depends_on("libxml2", when="+libxml2")
    # Java runtime and compiler (e.g. GNU gcj or kaffe)
    # C# runtime and compiler (e.g. pnet or mono)
    depends_on("tar", when="+tar")
    # depends_on('gzip', when='+gzip')
    depends_on("bzip2", when="+bzip2")
    depends_on("xz", when="+xz", type=("build", "link", "run"))

    # Optional dependencies
    # depends_on('glib')  # circular dependency?
    # depends_on('libcroco@0.6.1:')
    depends_on("libunistring", when="+libunistring")
    # depends_on('cvs')

    patch("test-verify-parallel-make-check.patch", when="@:0.19.8.1")
    patch("nvhpc-builtin.patch", when="@:0.21.0 %nvhpc")
    patch("nvhpc-export-symbols.patch", when="%nvhpc")
    patch("nvhpc-long-width.patch", when="%nvhpc")

    # Apply this only where we know that the system libc is glibc, be very careful:
    @when("@:0.21.0 target=ppc64le:")
    def patch(self):
        for fn in ("gettext-tools/gnulib-lib/cdefs.h", "gettext-tools/libgrep/cdefs.h"):
            with open(fn, "w") as f:
                f.write("#include <sys/cdefs.h>\n")

    @classmethod
    def determine_version(cls, exe):
        gettext = Executable(exe)
        output = gettext("--version", output=str, error=str)
        match = re.match(r"gettext(?: \(.+\)) ([\d.]+)", output)
        return match.group(1) if match else None

    def configure_args(self):
        spec = self.spec

        config_args = [
            "--disable-java",
            "--disable-csharp",
            "--with-libiconv-prefix={0}".format(spec["iconv"].prefix),
            "--with-included-glib",
            "--with-included-gettext",
            "--with-included-libcroco",
            "--without-emacs",
            "--with-lispdir=%s/emacs/site-lisp/gettext" % self.prefix.share,
            "--without-cvs",
        ]

        if "+curses" in spec:
            config_args.append("--with-ncurses-prefix={0}".format(spec["ncurses"].prefix))
        else:
            config_args.append("--disable-curses")

        if "+libxml2" in spec:
            config_args.append("--with-libxml2-prefix={0}".format(spec["libxml2"].prefix))
        else:
            config_args.append("--with-included-libxml")

        if "+bzip2" not in spec:
            config_args.append("--without-bzip2")

        if "+xz" not in spec:
            config_args.append("--without-xz")

        if "+libunistring" in spec:
            config_args.append(
                "--with-libunistring-prefix={0}".format(spec["libunistring"].prefix)
            )
        else:
            config_args.append("--with-included-libunistring")

        return config_args

    @property
    def libs(self):
        return find_libraries(
            ["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"],
            root=self.prefix,
            recursive=True,
        )
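The three nvhpc-*.patch files reproduced earlier in this diff are wired up by the patch() directives above, so they are only applied when the spec is built with the NVIDIA compiler. A hedged sketch of a build that would trigger them, assuming an nvhpc compiler is registered with spack:

    # Illustrative only: the when="%nvhpc" patches above are applied at staging time.
    spack install gettext@0.20.2 %nvhpc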
2017-04-20 Bruno Haible <bruno@clisp.org>
verify tests: Fix spurious failure with parallel make.
* tests/test-verify.sh: Build test-verify-try.o, not test-verify.o.
* tests/test-verify-try.c: New file.
Reported by Adam James Stewart <ajstewart@anl.gov>.
diff --git a/gettext-tools/gnulib-tests/test-verify.sh b/gettext-tools/gnulib-tests/test-verify.sh
index 3e76761..1e75d55 100755
--- a/gettext-tools/gnulib-tests/test-verify.sh
+++ b/gettext-tools/gnulib-tests/test-verify.sh
@@ -7,8 +7,9 @@ unset MALLOC_PERTURB_
# Rather than figure out how to invoke the compiler with the right
# include path ourselves, we let make do it:
-(cd "$initial_cwd_" && rm -f test-verify.o \
- && $MAKE test-verify.o >/dev/null 2>&1) \
+(cd "$initial_cwd_" \
+ && rm -f test-verify-try.o \
+ && $MAKE test-verify-try.o >/dev/null 2>&1) \
|| skip_ "cannot compile error-free"
# Now, prove that we encounter all expected compilation failures:
@@ -16,8 +17,8 @@ unset MALLOC_PERTURB_
: >err
for i in 1 2 3 4 5; do
(cd "$initial_cwd_"
- rm -f test-verify.o
- $MAKE CFLAGS=-DEXP_FAIL=$i test-verify.o) >>out 2>>err \
+ rm -f test-verify-try.o
+ $MAKE CFLAGS=-DEXP_FAIL=$i test-verify-try.o) >>out 2>>err \
&& { warn_ "compiler didn't detect verification failure $i"; fail=1; }
done
diff --git a/gettext-tools/gnulib-tests/test-verify-try.c b/gettext-tools/gnulib-tests/test-verify-try.c
new file mode 100644
index 0000000..362fb01
--- /dev/null
+++ b/tests/test-verify-try.c
@@ -0,0 +1,21 @@
+/* Test the "verify" module.
+
+ Copyright (C) 2017 Free Software Foundation, Inc.
+
+ This program is free software: you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3 of the License, or
+ (at your option) any later version.
+
+ This program is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ GNU General Public License for more details.
+
+ You should have received a copy of the GNU General Public License
+ along with this program. If not, see <http://www.gnu.org/licenses/>. */
+
+/* This is a separate source file, so that the execution of test-verify.sh
+ does not interfere with the building of the 'test-verify' program. */
+
+#include "test-verify.c"
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from pathlib import Path

from spack.package import *


class IconNwp(Package):
    """
    Recipe to build ICON-NWP using the LMU gitlab repository.
    """

    git = "ssh://git@gitlab.dkrz.de/icon/icon-nwp.git"
    homepage = "https://code.mpimet.mpg.de/projects/iconpublic"
    # maintainers("oriol.tinto")

    # Version w2w-B6 points to a different repository and branch
    version("w2w-B6", git="ssh://git@gitlab.physik.uni-muenchen.de/w2w/icon-w2w.git", branch="icon-w2w/icon-nwp-B6")

    # FIXME: The ugly configuration system in older icon versions prevents us from using this package with it.
    # version("2.5.0-nwp3", branch="icon-nwp/op-release-2.5.0-nwp3")
    version("2.6.4-nwp3", branch="icon-nwp/op-release-2.6.4-nwp3")
    version("2.6.5-nwp0", branch="icon-nwp/op-release-2.6.5-nwp0")
    version("master", branch="master")
    version("psp", branch="icon-nwp/icon-nwp-psp")

    variant("lmu", default=False, description="if git.url and submodules should be patched to use the LMU mirrors")

    # Dependencies
    depends_on("mpi")
    depends_on("netcdf-c")
    depends_on("netcdf-fortran")
    depends_on("eccodes+fortran")
    depends_on("libxml2")

    # Openblas? best way of doing it?
    depends_on("openblas", when="%gcc")
    depends_on("intel-mkl", when="%intel")

    # Extra dependencies for B6, including yaml and the hdf5 filters for compression.
    depends_on("libyaml", when="@w2w-B6")
    depends_on("hdf5-blosc", when="@w2w-B6")
    depends_on("h5z-zfp", when="@w2w-B6")
    depends_on("sz~hdf5", when="@w2w-B6")
    depends_on("sz3~hdf5", when="@w2w-B6")

    phases = ["configure", "build", "install"]

    def do_fetch(self, mirror_only=False):
        if "+lmu" in self.spec:
            self.fetcher[0].url = self.git = "ssh://git@gitlab.physik.uni-muenchen.de/w2w/icon.git"
        super(IconNwp, self).do_fetch(mirror_only)

    def patch(self):
        # Run git submodule update
        git = which("git")
        if "+lmu" in self.spec:
            self.patch_submodules()
        git("submodule", "update", "--init", "--recursive")

    def patch_submodules(self):
        """
        Because of the lack of access rights to the original submodule repositories,
        we patch the .gitmodules file to point to a different mirror.
        """
        git_submodules_file = Path().cwd() / ".gitmodules"
        git_mirror = "git@gitlab.lrz.de:dkrz-mirror"
        git_modules_patch = f"""
[submodule "externals/mtime"]
    path = externals/mtime
    url = {git_mirror}/libmtime.git
[submodule "externals/jsbach"]
    path = externals/jsbach
    url = {git_mirror}/jsbach.git
[submodule "externals/yac"]
    path = externals/yac
    url = {git_mirror}/YAC.git
[submodule "externals/self"]
    path = externals/self
    url = {git_mirror}/libself.git
[submodule "externals/tixi"]
    path = externals/tixi
    url = {git_mirror}/libtixi.git
[submodule "externals/yaxt"]
    path = externals/yaxt
    url = {git_mirror}/yaxt.git
[submodule "externals/rte-rrtmgp"]
    path = externals/rte-rrtmgp
    url = https://github.com/earth-system-radiation/rte-rrtmgp.git
[submodule "externals/cub"]
    path = externals/cub
    url = https://github.com/NVlabs/cub.git
[submodule "externals/omni-xmod-pool"]
    path = externals/omni-xmod-pool
    url = https://github.com/claw-project/omni-xmod-pool.git
[submodule "externals/cdi"]
    path = externals/cdi
    url = {git_mirror}/libcdi.git
[submodule "externals/sct"]
    path = externals/sct
    url = {git_mirror}/sct.git
[submodule "externals/ecrad"]
    path = externals/ecrad
    url = {git_mirror}/libecrad.git
[submodule "externals/dace_icon"]
    path = externals/dace_icon
    url = {git_mirror}/dace-icon-interface.git
[submodule "externals/emvorado"]
    path = externals/emvorado
    url = {git_mirror}/emvorado-for-icon.git
[submodule "utils/mkexp"]
    path = utils/mkexp
    url = https://git.mpimet.mpg.de/public/mkexp
[submodule "externals/art"]
    path = externals/art
    url = {git_mirror}/art.git
[submodule "externals/ppm"]
    path = externals/ppm
    url = https://gitlab.dkrz.de/jahns/ppm.git
[submodule "externals/probtest"]
    path = externals/probtest
    url = {git_mirror}/cscs-sw_probtest.git
"""
        # Replace the content of the original file with the patch
        with git_submodules_file.open("w") as out_f:
            out_f.write(git_modules_patch)

    def setup_build_environment(self, env):
        spec = self.spec
        # Some environment variables to set
        env_variables_to_set = {
            "CC": spec["mpi"].mpicc,
            "FC": spec["mpi"].mpifc,
            "F77": spec["mpi"].mpif77,
        }
        for variable, value in env_variables_to_set.items():
            env.set(variable, value)

    def configure(self, spec, prefix):
        spec = self.spec
        print(spec["mpi"].mpifc)
        libs = [
            f"-L{spec['netcdf-c'].prefix.lib} -lnetcdf",         # netcdf-c libs
            f"-L{spec['netcdf-fortran'].prefix.lib} -lnetcdff",  # netcdf-fortran libs
            f"-L{spec['eccodes'].prefix.lib} -leccodes_f90 -leccodes",
            f"-L{spec['libxml2'].prefix.lib} -lxml2",            # XML2 libs
        ]

        if self.spec.satisfies("%gcc"):
            libs.append("-lopenblas")
        elif self.spec.satisfies("%intel"):
            libs.append("-qmkl=sequential")

        if self.spec.version == Version("w2w-B6"):
            libs.append("-lyaml")

        mtune = "generic"
        INCLUDES = f"-I{spec['libxml2'].prefix}/include/libxml2"

        options = [
            f"CC={spec['mpi'].mpicc}",
            f"FC={spec['mpi'].mpifc}",
            f"CFLAGS=-g -mpc64 {INCLUDES}",
            f"ICON_CFLAGS=-O3 -g -mtune={mtune}",
            f"ICON_BUNDLED_CFLAGS=-O3 -mtune={mtune}",
            "FCFLAGS=-std=legacy -fmodule-private -fimplicit-none -fmax-identifier-length=63 -Wall -Wcharacter-truncation -Wconversion -Wunderflow -Wunused-parameter -Wno-surprising -fall-intrinsics -g -mpc64 -w",
            "ICON_FCFLAGS=-fbacktrace -fbounds-check -fstack-protector-all -finit-real=nan -finit-integer=-2147483648 -finit-character=127 -w -O2",
            f"ICON_OCEAN_FCFLAGS=-O3 -mtune={mtune}",
            f"LDFLAGS={' '.join(libs)}",
            f"LIBS={' '.join(libs)}",
            f"--prefix={prefix}",
            "--enable-grib2",
        ]

        # For some reason there's a problem with OpenMPI with gcc@11.3.0 which makes the configuration fail.
        if self.spec.compiler.name == "gcc" and self.spec.compiler.version == Version("11.3.0"):
            options.append("--enable-mpi-checks=no")

        configure(*options)

    def build(self, spec, prefix):
        make()

    def install(self, spec, prefix):
        make("install")
        # Create extra folders with the data and the docs
        self.make_extra_folders(prefix)

    def setup_run_environment(self, env):
        env.set("ICON_BASE_PATH", self.spec.prefix)
        env.set("ICON_DATA_PATH", self.spec.prefix.join("data"))
        env.set("ICON_DOCS_PATH", self.spec.prefix.join("doc"))

    def make_extra_folders(self, prefix):
        mkdir = which("mkdir")
        rsync = which("rsync")
        curl = which("curl")

        # copy executables and documentation
        mkdir("-p", f"{prefix}/data")
        mkdir("-p", f"{prefix}/doc")
        mkdir("-p", f"{prefix}/run")
        rsync("-av", "data/", f"{prefix}/data")
        rsync("-av", "run/", f"{prefix}/run")
        rsync("-av", "--include='*.pdf'", "--exclude='*.*'", "doc/", f"{prefix}/doc/")
        curl(
            "https://code.mpimet.mpg.de/attachments/download/19568/ICON_tutorial_2019.pdf",
            "--output",
            f"{prefix}/doc/ICON_tutorial_2019.pdf",
        )
        curl(
            "http://www.cosmo-model.org/content/model/documentation/core/emvorado_userguide.pdf",
            "--output",
            f"{prefix}/doc/emvorado_userguide.pdf",
        )
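As a usage sketch for this recipe: combining one of the declared versions with the +lmu variant redirects the fetch URL and the submodules to the LMU/LRZ mirrors, as implemented in do_fetch() and patch_submodules() above. The spec values here are illustrative, not prescriptive:

    # Illustrative: build a tagged icon-nwp release against the mirrors (+lmu)
    # with a specific compiler; adjust version and compiler to your site.
    spack install --reuse icon-nwp@2.6.5-nwp0 +lmu %gcc@11.3.0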
# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack.package import *


class Sz3(CMakePackage):
    """SZ3 is the next generation of the SZ compressor framework"""

    homepage = "https://github.com/szcompressor/SZ3"
    git = "https://github.com/szcompressor/SZ3"
    # maintainers("disheng222")

    tags = ["e4s"]

    version("master")
    version("3.1.7", commit="c49fd17f2d908835c41000c1286c510046c0480e")
    version("3.1.5.4", commit="4c6ddf628f27d36b28d1bbda02174359cd05573d")
    version("3.1.5.1", commit="5736a63b917e439dd62248b4ff6234e96726af5d")
    version("3.1.3.1", commit="323cb17b412d657c4be681b52c34beaf933fe7af")
    version("3.1.3", commit="695dff8dc326f3b165f6676d810f46add088a585")

    variant("hdf5", default=False, description="enable hdf5 filter support")
    variant("mdz", default=True, description="build mdz executable")

    depends_on("zstd")
    depends_on("gsl")
    depends_on("pkgconfig")
    depends_on("hdf5", when="+hdf5")

    def setup_run_environment(self, env):
        if "+hdf5" in self.spec:
            env.prepend_path("HDF5_PLUGIN_PATH", self.prefix.lib64)

    def cmake_args(self):
        return [
            "-DSZ3_USE_BUNDLED_ZSTD=OFF",
            "-DSZ3_DEBUG_TIMINGS=OFF",
            self.define_from_variant("BUILD_MDZ", "mdz"),
            self.define_from_variant("BUILD_H5Z_FILTER", "hdf5"),
        ]

    def test(self):
        if self.spec.satisfies("@:3.1.6"):
            print("smoke tests are only supported on 3.1.7 and later, skipping")
            return
        self.run_test(self.prefix.bin.sz3_smoke_test, purpose="sz3 works")
        if "+mdz" in self.spec:
            self.run_test(self.prefix.bin.mdz_smoke_test, purpose="mdz works")
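For context, setup_run_environment() above prepends the plugin directory to HDF5_PLUGIN_PATH, so an HDF5-enabled build exposes the SZ3 filter as soon as the package is loaded. A hedged usage sketch:

    # Illustrative: build SZ3 with the HDF5 filter and load it, so that
    # HDF5_PLUGIN_PATH (set by setup_run_environment above) is exported.
    spack install sz3@3.1.7 +hdf5
    spack load --first sz3
    echo "$HDF5_PLUGIN_PATH"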
repo:
  namespace: 'autosubmit-icon-repository'
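This repo.yaml turns the project's spack_repo directory into a spack package repository holding the recipes above; build_icon.sh below registers it before building. The equivalent manual commands, with the path taken from that script:

    # Register the project's package repository so spack can find the
    # icon-nwp and dwd-icon-tools recipes; WORKDIR is %HPCROOTDIR% in the scripts.
    spack repo add ${WORKDIR}/proj/spack_repo
    spack repo list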
# Get some variables provided by autosubmit.
# TODO: What do we do to ensure that these variables are defined in the proj file?
WORKDIR=%HPCROOTDIR%
ICON_VERSION=%ICON.VERSION%
COMPILER=%SPACK.COMPILER%

# If the workdir directory does not exist create it
if [ ! -d ${WORKDIR} ]; then
@@ -12,44 +10,46 @@ if [ ! -d ${WORKDIR} ]; then
fi

# Go to the working directory
cd ${WORKDIR} || exit

. ${WORKDIR}/proj/platforms/common/spack_utils.sh
load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"

if [ ! $(rpm -qa | grep bzip2) ]; then
  spack install --reuse bzip2
  spack load --first bzip2
fi

SPACK_BUILD_ICON="%ICON.BUILD_CMD%"
if [ ! -z "$SPACK_BUILD_ICON" ]; then
  echo "Installing ICON with spack!"
  echo "cmd=$SPACK_BUILD_ICON"
  # In case the autosubmit repository with the icon-nwp recipe doesn't exist, add it
  if [[ $(spack repo list | grep "${WORKDIR}/proj/spack_repo") ]]; then
    echo "icon spack repo was already added to repo list"
  else
    spack repo add ${WORKDIR}/proj/spack_repo
  fi
  spack spec $SPACK_BUILD_ICON
  spack install --reuse $SPACK_BUILD_ICON
  # TODO: had some problems with spack load when more than one version is available; adding --first to overcome that,
  # although in principle we should not install the model if it's already installed.
  spack load --first "icon-nwp@${ICON_VERSION}%${COMPILER}"
else
  echo "\%icon.build_cmd\% is not defined. If you want to compile icon with spack, please provide a spack compile instruction string in your build.yml"
fi

# Need to get ECCODES DWD definitions:
eccodes_version=$(spack spec icon-nwp@${ICON_VERSION}%${COMPILER} | grep eccodes | grep -o "@.*%" | grep -o "[0-9\.]*")
definitions_tar_file=eccodes_definitions.edzw-${eccodes_version}-1.tar.bz2
if [ ! -f "${definitions_tar_file}" ]; then
  defs_url=https://opendata.dwd.de/weather/lib/grib/${definitions_tar_file}
  wget "${defs_url}"

  # Decompress definitions file
  tar -xf "${definitions_tar_file}"

  # Create a file containing the environment variable that needs to be set in order to use DWD's definitions:
  echo "export ECCODES_DEFINITION_PATH=${WORKDIR}/definitions.edzw-${eccodes_version}-1" >eccodes_defs.env
fi
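Downstream jobs then only need to source the generated eccodes_defs.env before launching the model so that eccodes picks up the DWD definitions. A minimal sketch, assuming the working-directory layout used above:

    # Illustrative: export ECCODES_DEFINITION_PATH from the file written by the
    # snippet above before running icon.
    source ${WORKDIR}/eccodes_defs.env
    echo "$ECCODES_DEFINITION_PATH"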
@@ -2,41 +2,29 @@
# TODO: What do we do to ensure that these variables are defined in the proj file?
WORKDIR=%HPCROOTDIR%
ICON_VERSION=%ICON_VERSION%
SPACK_URL=%SPACK.URL%
SPACK_BRANCH=%SPACK.BRANCH%
SPACK_COMPILER=%SPACK.COMPILER%

PYTHON_VERSION=%PYTHON_ENVIRONMENT.PYTHON_VERSION%

# Go to the working directory
cd ${WORKDIR}

. ${WORKDIR}/proj/platforms/common/spack_utils.sh
load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"

# Use spack to get a recent enough version of python3
if [ $(spack find python@${PYTHON_VERSION} &>/dev/null) ]; then
  echo "python@${PYTHON_VERSION} already installed!"
else
  echo "Installing a version of python3"
  spack install python@${PYTHON_VERSION}
fi

# Load the python module
spack load --first python@${PYTHON_VERSION}

PYTHON_ENVIRONMENT_FOLDER=%python_environment.folder_name%
@@ -56,9 +44,16 @@ ln -sf $(which python3) ${WORKDIR}/python3

requirements="%python_environment.requirements%"
# Convert list with python format to a bash array
requirements=($(echo ${requirements} | sed "s/'//g" | tr -d '[],'))

# TODO: Shouldn't be necessary, but for now it is, to overcome an issue with a repetition of the requirements.
# Use sort and uniq to get the unique elements
unique_requirements=($(printf "%s\n" "${requirements[@]}" | sort -u))

# Print the unique elements
echo "${unique_requirements[@]}"

# Install requirements.
for requirement in "${unique_requirements[@]}"; do
  python -m pip install ${requirement}
done
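A quick sanity check after this job, assuming the default folder_name configured above (the jobs that set EXECUTABLE to the environment's interpreter expect it at this path):

    # Hypothetical check: list the packages installed into the experiment's venv.
    ${WORKDIR}/python_environment/bin/python3 -m pip list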