Commit a8d3ddad authored by Oriol.Tinto

Merge branch 'simplification' into 'real-from-dwd-ana'

Cast checkpoint time to integer, add 10 minutes to the simulation to guarantee...

See merge request w2w/autosubmit-icon-today!1
parents b7a25c0e 24747fea
Related merge requests: !2 "Split prepare_rundir.sh into three different stages for the whole experiment, ...", !1 "Cast checkpoint time to integer, add 10 minutes to the simulation to guarantee..."
Showing 342 additions and 296 deletions
@@ -57,42 +57,50 @@ JOBS:
     # EXECUTABLE: /my_python_env/python3
   BUILD_ICON:
-    FILE: templates/build_icon.sh
+    FILE: templates/common/build_icon.sh
     WALLCLOCK: 04:00
     PROCESSORS: 16
   BUILD_PYTHON_ENVIRONMENT:
-    FILE: templates/build_python_environment.sh
+    FILE: templates/common/build_python_environment.sh
     # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
     DEPENDENCIES: BUILD_ICON
     WALLCLOCK: 01:00
     PROCESSORS: 16
+  TRANSFER_NAMELISTS:
+    FILE: templates/common/transfer_namelists.sh
+    PLATFORM: LOCAL
   PREPARE_EXPERIMENT:
-    FILE: templates/prepare_experiment.sh
+    FILE: templates/real-from-dwd-ana/prepare_experiment.sh
     DEPENDENCIES: BUILD_ICON
     RUNNING: once
     WALLCLOCK: 01:00
   PREPARE_DATE:
-    FILE: templates/prepare_date.sh
+    FILE: templates/real-from-dwd-ana/prepare_date.sh
     RUNNING: date
     WALLCLOCK: 01:00
   PREPARE_MEMBER:
-    FILE: templates/prepare_member.sh
+    FILE: templates/real-from-dwd-ana/prepare_member.sh
     DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE
     RUNNING: member
     WALLCLOCK: 01:00
   PREPARE_CHUNK:
-    FILE: templates/prepare_chunk.sh
-    DEPENDENCIES: PREPARE_MEMBER RUN_ICON-1
+    FILE: templates/real-from-dwd-ana/prepare_chunk.py
+    DEPENDENCIES: TRANSFER_NAMELISTS BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
     WALLCLOCK: 00:05
     RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
   RUN_ICON:
-    FILE: templates/run_icon.sh
+    FILE: templates/common/run_icon.sh
     DEPENDENCIES: PREPARE_CHUNK
     WALLCLOCK: 04:00
     RUNNING: chunk
@@ -101,7 +109,7 @@ JOBS:
     CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive", "export OMPI_MCA_btl_tcp_if_include=10.0.0.0/8"]
   COMPRESS:
-    FILE: templates/compress.py
+    FILE: templates/common/compress.py
     DEPENDENCIES: RUN_ICON BUILD_PYTHON_ENVIRONMENT COMPRESS-1
     RUNNING: chunk
     TYPE: python
@@ -110,7 +118,7 @@ JOBS:
     MEMORY: 16384
   TRANSFER:
-    FILE: templates/transfer.sh
+    FILE: templates/common/transfer.sh
     DEPENDENCIES: COMPRESS
     # Since this is running locally, can simply leave a long wallclock.
     WALLCLOCK: 24:00
@@ -118,7 +126,7 @@ JOBS:
     PLATFORM: LOCAL
   CLEAN:
-    FILE: templates/clean.sh
+    FILE: templates/common/clean.sh
     DEPENDENCIES: TRANSFER
     WALLCLOCK: 00:10
     RUNNING: member
...
@@ -7,32 +7,36 @@ icon:
   version: 2.6.5-nwp0
 python_environment:
+  # Name of the virtual environment in the remote platform experiment folder
   folder_name: python_environment
   requirements:
+    # Because there's an issue with numba, for now we need to keep a specific version of numpy
     - numpy==1.23
     - enstools-compression
     # Just to try a library from a git repository.
     - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
+    - f90nml
 simulation:
-  BGRID_GLOBAL: icon_grid_0012_R02B04_G.nc
-  RGRID_GLOBAL: icon_grid_0011_R02B03_R.nc
-  BGRID: icon_grid_0016_R02B06_G.nc
-  RGRID: icon_grid_0015_R02B05_R.nc
-  BEXTPAR: icon_extpar_0016_R02B06_G_20131206.nc
-  BGRID_NEST: icon_grid_nest_G_DOM01.nc
-  RGRID_NEST: icon_grid_nest_R_DOM01.nc
-  BGRID_NEST_LATBC: icon_grid_nest_G_DOM01_latbc.nc
-  # List of output files
-  OUTPUT_FILES: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
-  FILES_TO_CLEAN: "*.nc"
+  dynamics_grid_filename: icon_grid_0016_R02B06_G.nc
+  radiation_grid_filename: icon_grid_0015_R02B05_R.nc
+  external_parameters_filename: icon_extpar_0016_R02B06_G_20131206.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/namelists/icon_master_real-from-dwd-ana.namelist"
+    atmosphere: "%HPCROOTDIR%/namelists/icon_atmosphere_real-from-dwd-ana.namelist"
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
+  files_to_clean: "*.nc"
 data_management:
-  local_folder: /scratch/o/Oriol.Tinto/tmp/
+  # Where do we put the output files afterwards?
+  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
 initial_conditions:
+  # Where are we getting our initial data from?
   parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
   member: 1
\ No newline at end of file
&run_nml
ltestcase = .false.
dtime = 180
output = 'nml'
msg_level = 15
num_lev = 90
lvert_nest = .false.
ldynamics = .true.
ltransport = .true.
ntracer = 5
iforcing = 3
/
&time_nml
dt_restart = '%checkpoint_time%'
/
&io_nml
dt_checkpoint = '%checkpoint_time%'
/
&nwp_phy_nml
lupatmo_phy = .FALSE.
/
&grid_nml
dynamics_parent_grid_id = 0
dynamics_grid_filename = '%dynamics_grid_filename%'
radiation_grid_filename = '%radiation_grid_filename%'
lredgrid_phys = .true.
/
&extpar_nml
itopo = 1
extpar_filename = '%external_parameters_filename%'
/
&initicon_nml
init_mode = 1,
dwdfg_filename = '%first_guess_filename%'
dwdana_filename = '%analysis_filename%'
lconsistency_checks = .false.
ana_varnames_map_file = 'ana_varnames_map_file.txt'
/
! settings from operational setup for vertical coordinate
&sleve_nml
min_lay_thckn = 20.
max_lay_thckn = 400.
htop_thcknlimit = 14000.
top_height = 75000.
stretch_fac = 0.9
decay_scale_1 = 4000.
decay_scale_2 = 2500.
decay_exp = 1.2
flat_height = 16000.
/
&nonhydrostatic_nml
damp_height = 22000.
rayleigh_coeff = 1
/
&parallel_nml
nproma = 16
/
! LATBC files, these files will be used as input for the next example.
&output_nml
file_interval = 'PT3600S'
output_start = '%Chunk_START_DATE%'
output_end = '%Chunk_END_DATE%'
output_filename = "latbc"
output_interval = 'PT3600S'
include_last = .true.
ml_varlist = 'u', 'v', 'w', 'theta_v', 'rho', 'qv', 'qc', 'qi', 'qr', 'qs', 'z_ifc'
/
! First Guess file
&output_nml
file_interval = 'PT3600S'
output_start = '%Chunk_START_DATE%'
output_end = '%Chunk_END_DATE%'
output_filename = "init"
output_interval = 'PT3600S'
include_last = .true.
ml_varlist = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
/
&master_nml
lrestart = "%is_restart%"
lrestart_write_last = .true.
/
&master_model_nml
model_type = 1 ! atmospheric model
model_name = "ATMO" ! name of this model component
model_namelist_filename = "icon_atmosphere.namelist"
/
&master_time_control_nml
calendar = "proleptic gregorian"
experimentStartDate = '%Chunk_START_DATE%'
experimentStopDate = '%Chunk_END_DATE%'
/
\ No newline at end of file
File moved
File moved
File moved
#!/bin/bash -l
# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
dynamics_grid_filename=%simulation.dynamics_grid_filename%
radiation_grid_filename=%simulation.radiation_grid_filename%
external_parameters_filename=%simulation.external_parameters_filename%
STARTDATE=%SDATE%
# Example of date format "2018-06-01T00:00:00Z"
START_YEAR=%Chunk_START_YEAR%
START_MONTH=%Chunk_START_MONTH%
START_DAY=%Chunk_START_DAY%
START_HOUR=%Chunk_START_HOUR%
END_YEAR=%Chunk_END_YEAR%
END_MONTH=%Chunk_END_MONTH%
END_DAY=%Chunk_END_DAY%
END_HOUR=%Chunk_END_HOUR%
Chunk_START_DATE="${START_YEAR}-${START_MONTH}-${START_DAY}T${START_HOUR}:00:00Z"
Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:00:00Z"
# Convert dates to Unix timestamps
t1_unix=$(date -d "$Chunk_START_DATE" +%s)
t2_unix=$(date -d "$Chunk_END_DATE" +%s)
# Compute difference in seconds
checkpoint_time=$(((t2_unix - t1_unix)))
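# e.g. a 12-hour chunk gives checkpoint_time=43200 seconds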
# Model time step in seconds
dtime=180
# Extend chunk 10 minutes to ensure checkpoint creation at the proper time
# TODO: Works but it is a bit ugly.
Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:10:00Z"
MEMBER=%MEMBER%
CHUNK=%CHUNK%
# If the chunk is not the first one, start from a restart file.
if [[ "${CHUNK}" -eq "1" ]]; then
is_restart=.false.
else
is_restart=.true.
fi
# Define rundir
RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
cd ${RUNDIR} || exit
# Get AN and FG file names from file that was saved during prepare_rundir
AN_FILE=$( cat an_file.txt )
FG_FILE=$( cat fg_file.txt )
File moved
@@ -11,7 +11,7 @@ HPCHOST=%HPCHOST%
 # Define output dir in remote machine
 OUTPUT_DIR=${WORKDIR}/output/${STARTDATE}/${MEMBER}
-MAIN_LOCAL_FOLDER=%data_management.local_folder%/%DEFAULT.EXPID%
+MAIN_LOCAL_FOLDER=%data_management.local_destination_folder%/%DEFAULT.EXPID%
 DESTINATION_DIR=${MAIN_LOCAL_FOLDER}/${STARTDATE}/${MEMBER}
...
# Synchronize the local namelists with the remote directory
# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
HPCUSER=%HPCUSER%
HPCHOST=%HPCHOST%
# Define local and remote namelists folders
REMOTE_WORKDIR=${WORKDIR}/
LOCAL_NAMELISTS_FOLDER="%PROJDIR%/namelists"
# Transfer the namelists (no trailing slash on the source, so rsync creates ${REMOTE_WORKDIR}/namelists on the remote side)
rsync -v -u -r --no-relative ${LOCAL_NAMELISTS_FOLDER} ${HPCUSER}@${HPCHOST}:${REMOTE_WORKDIR}
#!/bin/bash -l
# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
ICON_VERSION=%ICON_VERSION%
BGRID_GLOBAL=%simulation.BGRID_GLOBAL%
RGRID_GLOBAL=%simulation.RGRID_GLOBAL%
BGRID=%simulation.BGRID%
RGRID=%simulation.RGRID%
BEXTPAR=%simulation.BEXTPAR%
STARTDATE=%SDATE%
# Example of date format "2018-06-01T00:00:00Z"
START_YEAR=%Chunk_START_YEAR%
START_MONTH=%Chunk_START_MONTH%
START_DAY=%Chunk_START_DAY%
START_HOUR=%Chunk_START_HOUR%
END_YEAR=%Chunk_END_YEAR%
END_MONTH=%Chunk_END_MONTH%
END_DAY=%Chunk_END_DAY%
END_HOUR=%Chunk_END_HOUR%
Chunk_START_DATE="${START_YEAR}-${START_MONTH}-${START_DAY}T${START_HOUR}:00:00Z"
Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:00:00Z"
# Convert dates to Unix timestamps
t1_unix=$(date -d "$Chunk_START_DATE" +%s)
t2_unix=$(date -d "$Chunk_END_DATE" +%s)
# Compute difference in seconds
checkpoint_time=$(((t2_unix - t1_unix)))
# Compute number of steps
dtime=180
nsteps=$((checkpoint_time / ${dtime}))
# Extend chunk 10 minutes to ensure checkpoint creation at the proper time
# TODO: Works but it is a bit ugly.
Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:10:00Z"
MEMBER=%MEMBER%
CHUNK=%CHUNK%
# If the chunk is not the first one, start from a restart file.
if [[ "${CHUNK}" -eq "1" ]]; then
is_restart=.false.
else
is_restart=.true.
fi
# Define rundir
RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
cd ${RUNDIR}
# Get AN and FG file names from file that was saved during prepare_rundir
AN_FILE=$( cat an_file.txt )
FG_FILE=$( cat fg_file.txt )
# cat >icon_master.namelist <<EOF
# &master_nml
# lrestart = ${is_restart}
# lrestart_write_last = .true.
# /
# &master_model_nml
# model_type = 1 ! atmospheric model
# model_name = "ATMO" ! name of this model component
# model_namelist_filename = "icon_atmosphere.namelist"
# /
# &master_time_control_nml
# calendar = "proleptic gregorian"
# experimentStartDate = '${Chunk_START_DATE}'
# experimentStopDate = '${Chunk_END_DATE}'
# /
# EOF
# cat >icon_atmosphere.namelist <<EOF
# &run_nml
# ltestcase = .TRUE. ! idealized testcase runs
# dtime = 300 ! time step of 300 seconds
# output = 'nml' ! use output nameslists
# msg_level = 15
# nsteps = ${nsteps}
# num_lev = 31
# lvert_nest = .false.
# ldynamics = .true.
# ltransport = .true.
# ntracer = 5
# iforcing = 3
# /
# &time_nml
# dt_restart = ${checkpoint_time}
# /
# &io_nml
# dt_checkpoint = ${checkpoint_time}
# /
# &grid_nml
# dynamics_grid_filename = '$BGRID_GLOBAL'
# radiation_grid_filename = '$RGRID_GLOBAL'
# dynamics_parent_grid_id = 0
# lredgrid_phys = .true.
# /
# &nh_testcase_nml
# nh_test_name = 'APE_nwp' ! testcase selection
# ape_sst_case = 'sst_qobs'
# /
# &nonhydrostatic_nml
# damp_height = 18000
# rayleigh_coeff = 0.75
# /
# &nwp_phy_nml
# inwp_surface = 0
# /
# &parallel_nml
# nproma = 16
# /
# ! the following two output files are used to initialize the next run
# &output_nml
# file_interval = 'PT3600S'
# output_start = '${Chunk_START_DATE}'
# output_end = '${Chunk_END_DATE}'
# output_filename = "init-test"
# output_interval = 'PT3600S'
# include_last = .true.
# mode = 1
# taxis_tunit = 1
# ml_varlist = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
# remap = 1
# reg_lon_def = -30.,0.5,30.
# reg_lat_def = 90.,-0.5, -90.
# /
# &output_nml
# steps_per_file = 1
# output_start = '${Chunk_START_DATE}'
# output_end = '${Chunk_START_DATE}'
# output_filename = "init-test-ext"
# include_last = .true.
# output_interval = 'PT3600S'
# ml_varlist = 'depth_lk', 'emis_rad', 'fr_lake', 'fr_land', 'topography_c', 'soiltyp', 'sso_stdh', 'sso_theta', 'sso_gamma', 'sso_sigma'
# /
# EOF
# TODO: The namelist shouldn't be hardcoded into the template files.
cat > icon_atmosphere.namelist << EOF
&run_nml
ltestcase = .false.
dtime = ${dtime}
output = 'nml'
msg_level = 15
num_lev = 90
lvert_nest = .false.
ldynamics = .true.
ltransport = .true.
ntracer = 5
iforcing = 3
/
&time_nml
dt_restart = ${checkpoint_time}
/
&io_nml
dt_checkpoint = ${checkpoint_time}
/
&nwp_phy_nml
lupatmo_phy = .FALSE.
/
&grid_nml
dynamics_parent_grid_id = 0
dynamics_grid_filename = '$BGRID'
radiation_grid_filename = '$RGRID'
lredgrid_phys = .true.
/
&extpar_nml
itopo = 1
extpar_filename = '$BEXTPAR'
/
&initicon_nml
init_mode = 1,
dwdfg_filename = '$FG_FILE'
dwdana_filename = '$AN_FILE'
lconsistency_checks = .false.
ana_varnames_map_file = 'ana_varnames_map_file.txt'
/
! settings from operational setup for vertical coordinate
&sleve_nml
min_lay_thckn = 20.
max_lay_thckn = 400.
htop_thcknlimit = 14000.
top_height = 75000.
stretch_fac = 0.9
decay_scale_1 = 4000.
decay_scale_2 = 2500.
decay_exp = 1.2
flat_height = 16000.
/
&nonhydrostatic_nml
damp_height = 22000.
rayleigh_coeff = 1
/
&parallel_nml
nproma = 16
/
! LATBC files, these files will be used as input for the next example.
&output_nml
file_interval = 'PT3600S'
output_start = '${Chunk_START_DATE}'
output_end = '${Chunk_END_DATE}'
output_filename = "latbc"
output_interval = 'PT3600S'
include_last = .true.
ml_varlist = 'u', 'v', 'w', 'theta_v', 'rho', 'qv', 'qc', 'qi', 'qr', 'qs', 'z_ifc'
/
! First Guess file
&output_nml
file_interval = 'PT3600S'
output_start = '${Chunk_START_DATE}'
output_end = '${Chunk_END_DATE}'
output_filename = "init"
output_interval = 'PT3600S'
include_last = .true.
ml_varlist = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
/
EOF
cat > icon_master.namelist << EOF
&master_nml
lrestart = ${is_restart}
lrestart_write_last = .true.
/
&master_model_nml
model_type = 1 ! atmospheric model
model_name = "ATMO" ! name of this model component
model_namelist_filename = "icon_atmosphere.namelist"
/
&master_time_control_nml
calendar = "proleptic gregorian"
experimentStartDate = '${Chunk_START_DATE}'
experimentStopDate = '${Chunk_END_DATE}'
/
EOF
import logging
import re
from datetime import datetime, timedelta
from pathlib import Path
import f90nml
# Configure logging so the replacement messages are visible in the job log.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("prepare_chunk")
# Get some autosubmit variables
WORKDIR = "%HPCROOTDIR%"
STARTDATE = "%SDATE%"
MEMBER = "%MEMBER%"
CHUNK = "%CHUNK%"
# Example of date format "2018-06-01T00:00:00Z"
date_format = "%simulation.date_format%"
START_YEAR = "%Chunk_START_YEAR%"
START_MONTH = "%Chunk_START_MONTH%"
START_DAY = "%Chunk_START_DAY%"
START_HOUR = "%Chunk_START_HOUR%"
END_YEAR = "%Chunk_END_YEAR%"
END_MONTH = "%Chunk_END_MONTH%"
END_DAY = "%Chunk_END_DAY%"
END_HOUR = "%Chunk_END_HOUR%"
Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))
# Compute difference in seconds
checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())
# TODO: Is that really necessary?
# Add 10 minutes to allow the model to write the restarts
Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)
# Get run directory
RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/{MEMBER}")
# TODO: This is a bit ugly
# Read first-guess and analysis filenames from files:
first_guess_filename = (RUNDIR / "fg_file.txt").read_text().strip()
analysis_filename = (RUNDIR / "an_file.txt").read_text().strip()
# Get some variable replacements from the proj.yml file through autosubmit
variable_replacements = {
    "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
    "radiation_grid_filename": "%simulation.radiation_grid_filename%",
    "external_parameters_filename": "%simulation.external_parameters_filename%",
    "first_guess_filename": first_guess_filename,
    "analysis_filename": analysis_filename,
    "Chunk_START_DATE": Chunk_START_DATE.strftime(date_format),
    "Chunk_END_DATE": Chunk_END_DATE.strftime(date_format),
    "is_restart": CHUNK != "1",
    "checkpoint_time": checkpoint_time,
}
def adapt_namelist(input_namelist: str, output_namelist: str):
    input_namelist = Path(input_namelist)
    output_namelist = Path(output_namelist)

    namelist = f90nml.read(input_namelist.as_posix())
    group_keys = [gk for gk in namelist]
    for group in group_keys:
        variable_keys = [vk for vk in namelist[group]]
        for variable in variable_keys:
            value = namelist[group][variable]
            m = re.match(r"%(.*)%", str(value))
            if m:
                key = m.group(1)
                if key not in variable_replacements:
                    raise AssertionError(f"The namelist {input_namelist.as_posix()!r} contains the variable {key!r} "
                                         f"which is not in the list of provided replacements:\n"
                                         f"{[v for v in variable_replacements]}")
                logger.info(f"Replacing {group}>{variable}:{key} with {variable_replacements[key]!r}")
                namelist[group][variable] = variable_replacements[key]

    f90nml.write(nml=namelist, nml_path=output_namelist.as_posix(), force=True)
if __name__ == '__main__':
    atmosphere_namelist_path = "%simulation.namelist_paths.atmosphere%"
    master_namelist_path = "%simulation.namelist_paths.master%"

    # Adapt atmosphere namelist
    adapt_namelist(input_namelist=atmosphere_namelist_path,
                   output_namelist=(RUNDIR / "icon_atmosphere.namelist").as_posix())

    # Adapt master namelist
    adapt_namelist(input_namelist=master_namelist_path,
                   output_namelist=(RUNDIR / "icon_master.namelist").as_posix())
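For reference, a minimal sketch of what the two changes in this merge amount to at runtime; the chunk dates and file paths below are assumptions for illustration, not values taken from the experiment configuration:

from datetime import datetime, timedelta

import f90nml

# Assumed example chunk: 2023-01-01T00Z to 2023-01-01T12Z.
chunk_start = datetime(2023, 1, 1, 0)
chunk_end = datetime(2023, 1, 1, 12)

# Cast to int so the namelist ends up with "dt_restart = 43200" rather than "43200.0".
checkpoint_time = int((chunk_end - chunk_start).total_seconds())

# Extend the simulated period by 10 minutes so the checkpoint at t = 43200 s is actually written.
chunk_end = chunk_end + timedelta(minutes=10)

# The template keeps the placeholder quoted ("dt_restart = '%checkpoint_time%'") so it stays a
# valid Fortran namelist; adapt_namelist() above swaps it for the integer value computed here.
nml = f90nml.read("icon_atmosphere_real-from-dwd-ana.namelist")  # input path assumed
nml["time_nml"]["dt_restart"] = checkpoint_time
nml["io_nml"]["dt_checkpoint"] = checkpoint_time
f90nml.write(nml, "icon_atmosphere.namelist", force=True)  # output path assumed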
@@ -2,13 +2,9 @@
 # Get some variables provided by autosubmit.
 WORKDIR=%HPCROOTDIR%
-ICON_VERSION=%ICON_VERSION%
-
-BGRID_GLOBAL=%simulation.BGRID_GLOBAL%
-RGRID_GLOBAL=%simulation.RGRID_GLOBAL%
-BGRID=%simulation.BGRID%
-RGRID=%simulation.RGRID%
-BEXTPAR=%simulation.BEXTPAR%
+DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
+RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
+EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
 # Activate spack
@@ -26,18 +22,18 @@ cd "${COMMON_INIDATA_FOLDER}" || exit
 # Download or copy required input files
 function download_file() {
   URL=$1
-  FILE=${2:-$(basename $URL)}
-  if [ ! -e $FILE ]; then
+  FILE=${2:-$(basename "$URL")}
+  if [ ! -e "$FILE" ]; then
     echo "Download $URL => $FILE"
-    wget -q $URL -O $FILE
+    wget -q "$URL" -O "$FILE"
   fi
 }
 # Download grid files and external parameters
 BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
-download_file $BASEURL/$BGRID
-download_file $BASEURL/$RGRID
-download_file $BASEURL/$BEXTPAR
+download_file $BASEURL/${DYNAMICS_GRID_FILENAME}
+download_file $BASEURL/${RADIATION_GRID_FILE}
+download_file $BASEURL/${EXTERNAL_PARAMETERS_FILE}
 # Link input for radiation
 ln -sf "${ICON_DATA_PATH}/rrtmg_lw.nc" .
...