From 6a8f152c060dcd7a0f0fc633d2093eab799d84ed Mon Sep 17 00:00:00 2001
From: "Jonas.Spaeth" <jonas.spaeth@physik.uni-muenchen.de>
Date: Wed, 9 Aug 2023 12:11:16 +0200
Subject: [PATCH] spin_off initial commit

---
 conf/spin-off/expdef.yml                      |  13 ++
 conf/spin-off/jobs.yml                        | 136 +++++++++++
 conf/spin-off/parent_simulation.yml           |  23 ++
 conf/spin-off/spinoff_simulation.yml          |  22 ++
 namelists/spin-off/icon_atmosphere.namelist   |   1 +
 templates/spin-off/adapt_member_utils.sh      | 218 ++++++++++++++++++
 templates/spin-off/adapt_parent_member.sh     |  66 ++++++
 templates/spin-off/adapt_spinoff_member.sh    |  66 ++++++
 templates/spin-off/prepare_parent_date.sh     |  76 ++++++
 .../spin-off/prepare_parent_experiment.sh     | 106 +++++++++
 templates/spin-off/prepare_parent_member.sh   |  41 ++++
 templates/spin-off/prepare_parent_namelist.py | 174 ++++++++++++++
 templates/spin-off/prepare_parent_runtime.py  |  46 ++++
 templates/spin-off/prepare_spinoff_date.sh    |  68 ++++++
 .../spin-off/prepare_spinoff_experiment.sh    | 106 +++++++++
 templates/spin-off/prepare_spinoff_member.sh  |  41 ++++
 .../spin-off/prepare_spinoff_namelist.py      | 174 ++++++++++++++
 17 files changed, 1377 insertions(+)
 create mode 100644 conf/spin-off/expdef.yml
 create mode 100644 conf/spin-off/jobs.yml
 create mode 100644 conf/spin-off/parent_simulation.yml
 create mode 100644 conf/spin-off/spinoff_simulation.yml
 create mode 100644 namelists/spin-off/icon_atmosphere.namelist
 create mode 100644 templates/spin-off/adapt_member_utils.sh
 create mode 100644 templates/spin-off/adapt_parent_member.sh
 create mode 100644 templates/spin-off/adapt_spinoff_member.sh
 create mode 100644 templates/spin-off/prepare_parent_date.sh
 create mode 100644 templates/spin-off/prepare_parent_experiment.sh
 create mode 100644 templates/spin-off/prepare_parent_member.sh
 create mode 100644 templates/spin-off/prepare_parent_namelist.py
 create mode 100644 templates/spin-off/prepare_parent_runtime.py
 create mode 100644 templates/spin-off/prepare_spinoff_date.sh
 create mode 100644 templates/spin-off/prepare_spinoff_experiment.sh
 create mode 100644 templates/spin-off/prepare_spinoff_member.sh
 create mode 100644 templates/spin-off/prepare_spinoff_namelist.py

diff --git a/conf/spin-off/expdef.yml b/conf/spin-off/expdef.yml
new file mode 100644
index 0000000..5b30855
--- /dev/null
+++ b/conf/spin-off/expdef.yml
@@ -0,0 +1,13 @@
+# TODO: How to specify mother-member/ date combinations as start points for spin-offs?
+experiment:
+#  DATELIST: 20201001
+#  MEMBERS: "m[1-2]"
+  DATELIST_FROM_MOTHERMEMBERS:  # list of (date, mothermember) to run spinoffs from
+    - [20201002, 2]
+    - [20201003, 2]
+  MEMBERS: "m[1-2]" # number of members in spinoff ensemble
+  CHUNKSIZEUNIT: day
+  CHUNKSIZE: 7
+  NUMCHUNKS: 2
+  CHUNKINI: 0
+  CALENDAR: standard
\ No newline at end of file
diff --git a/conf/spin-off/jobs.yml b/conf/spin-off/jobs.yml
new file mode 100644
index 0000000..f9cd36f
--- /dev/null
+++ b/conf/spin-off/jobs.yml
@@ -0,0 +1,136 @@
+## This file contains the template of the generic workflow
+## It can be used as a reference to create a custom workflow.
+# TODO: QUESTION: How to split workflows from mother-run-twin and spinoff ensembles?
+JOBS:
+  TRANSFER_PROJECT:
+    FILE: templates/common/transfer_project.sh
+    PLATFORM: LOCAL
+
+  BUILD_ICON:
+    FILE: templates/common/build_icon.sh
+    DEPENDENCIES: TRANSFER_PROJECT
+    WALLCLOCK: 04:00
+    PROCESSORS: 16
+    RETRIALS: 2 # retry because spack downloads sometimes timeout
+    NODES: 1
+
+  BUILD_PYTHON_ENVIRONMENT:
+    FILE: templates/common/build_python_environment.sh
+    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
+    DEPENDENCIES: BUILD_ICON
+    WALLCLOCK: 01:00
+    PROCESSORS: 16
+    NODES: 1
+
+############# parent experiment
+
+  PREPARE_PARENT_EXPERIMENT:
+    FILE: templates/spin-off/prepare_parent_experiment.sh
+    DEPENDENCIES: BUILD_ICON
+    RUNNING: once
+    WALLCLOCK: 01:00
+
+  PREPARE_PARENT_DATE:
+    FILE: templates/spin-off/prepare_parent_date.sh
+    RUNNING: date
+    WALLCLOCK: 01:00
+    PLATFORM: LOCAL
+
+  PREPARE_PARENT_MEMBER:
+    FILE: templates/spin-off/prepare_parent_member.sh
+    DEPENDENCIES: PREPARE_PARENT_EXPERIMENT PREPARE_PARENT_DATE
+    RUNNING: member
+    WALLCLOCK: 01:00
+
+  ADAPT_PARENT_MEMBER:
+    FILE: templates/spin-off/adapt_parent_member.sh
+    RUNNING: member
+    WALLCLOCK: 00:20
+    DEPENDENCIES: PREPARE_PARENT_MEMBER TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT
+
+  PREPARE_PARENT_NAMELIST:
+    FILE: templates/spin-off/prepare_parent_namelist.py
+    DEPENDENCIES: ADAPT_PARENT_MEMBER RUN_PARENT_ICON-1
+    WALLCLOCK: 00:05
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  RUN_PARENT_ICON:
+    FILE: templates/common/run_icon.sh
+    DEPENDENCIES: PREPARE_PARENT_NAMELIST COMPRESS-1  # TODO: remove COMPRESS-1?
+    WALLCLOCK: 08:00
+    RUNNING: chunk
+    PROCESSORS: 64
+    MEMORY: 81920
+    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive" ]
+
+############# spin-off experiment
+
+  PREPARE_SPINOFF_EXPERIMENT:
+    FILE: templates/spin-off/prepare_spinoff_experiment.sh
+    DEPENDENCIES: RUN_PARENT_ICON
+    RUNNING: once
+    WALLCLOCK: 01:00
+
+  PREPARE_SPINOFF_DATE:
+    FILE: templates/spin-off/prepare_spinoff_date.sh
+    RUNNING: date
+    WALLCLOCK: 01:00
+    PLATFORM: LOCAL
+    DEPENDENCIES: RUN_PARENT_ICON
+
+  PREPARE_SPINOFF_MEMBER:
+    FILE: templates/spin-off/prepare_spinoff_member.sh
+    DEPENDENCIES: PREPARE_SPINOFF_EXPERIMENT PREPARE_SPINOFF_DATE
+    RUNNING: member
+    WALLCLOCK: 01:00
+
+  ADAPT_SPINOFF_MEMBER:
+    FILE: templates/spin-off/adapt_spinoff_member.sh
+    RUNNING: member
+    WALLCLOCK: 00:20
+    DEPENDENCIES: PREPARE_SPINOFF_MEMBER TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT
+
+  PREPARE_SPINOFF_NAMELIST:
+    FILE: templates/spin-off/prepare_spinoff_namelist.py
+    DEPENDENCIES: ADAPT_SPINOFF_MEMBER RUN_SPINOFF_ICON-1
+    WALLCLOCK: 00:05
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  RUN_SPINOFF_ICON:
+    FILE: templates/common/run_icon.sh
+    DEPENDENCIES: PREPARE_SPINOFF_NAMELIST COMPRESS-1
+    WALLCLOCK: 08:00
+    RUNNING: chunk
+    PROCESSORS: 64
+    MEMORY: 81920
+    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive" ]
+
+############# finalize
+
+  COMPRESS:
+    FILE: templates/common/compress.py
+    DEPENDENCIES: RUN_SPINOFF_ICON
+    RUNNING: member
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PROCESSORS: 16
+    MEMORY: 16384
+    WALLCLOCK: 01:00
+
+  TRANSFER:
+    FILE: templates/common/transfer.sh
+    DEPENDENCIES: COMPRESS
+    # Since this is running locally, can simply leave a long wallclock.
+    WALLCLOCK: 24:00
+    RUNNING: member
+    PLATFORM: LOCAL
+
+  CLEAN:
+    FILE: templates/common/clean.sh
+    DEPENDENCIES: TRANSFER
+    WALLCLOCK: 00:10
+    RUNNING: member
diff --git a/conf/spin-off/parent_simulation.yml b/conf/spin-off/parent_simulation.yml
new file mode 100644
index 0000000..da3c0f9
--- /dev/null
+++ b/conf/spin-off/parent_simulation.yml
@@ -0,0 +1,23 @@
+simulation:
+  dynamics_grid_filename: icon_grid_0010_R02B04_G.nc
+  radiation_grid_filename: icon_grid_0009_R02B03_R.nc
+
+  external_parameters_filename: icon_extpar_0010_R02B04_G.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/production_project/namelists/common/icon_master.namelist"
+    atmosphere: "%HPCROOTDIR%/production_project/namelists/event-generator/icon_atmosphere.namelist"
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "fields_3d_*.nc surface_fields_*.nc *_restart_*.nc"
+  files_to_clean: "*.nc"
+
+  # TODO: change parent folder to parent-run restart file
+  initial_conditions:
+    # Where are we getting our initial data from?
+    local: true
+    # /scratch/p/Philip.Rupp/experiments/2chunks/20201001/m1/icon_grid_0010_R02B04_G_restart_atm_20201031T000000Z.nc
+#    parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
+    parent_folder: /scratch/p/Philip.Rupp/experiments/2chunks/20201001/m1/
+    sea_surface_temperature_forcing: /project/meteo/w2w/Z2/autoicon/dummy_sst_enso_neutral.nc
diff --git a/conf/spin-off/spinoff_simulation.yml b/conf/spin-off/spinoff_simulation.yml
new file mode 100644
index 0000000..0a3662c
--- /dev/null
+++ b/conf/spin-off/spinoff_simulation.yml
@@ -0,0 +1,22 @@
+simulation:
+  dynamics_grid_filename: icon_grid_0010_R02B04_G.nc
+  radiation_grid_filename: icon_grid_0009_R02B03_R.nc
+
+  external_parameters_filename: icon_extpar_0010_R02B04_G.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/production_project/namelists/common/icon_master.namelist"
+    atmosphere: "%HPCROOTDIR%/production_project/namelists/event-generator/icon_atmosphere.namelist"
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "fields_3d_*.nc surface_fields_*.nc *_restart_*.nc"
+  files_to_clean: "*.nc"
+
+  # TODO: change parent folder to parent-run restart file: folder is only being created after parent has produced new restart file
+  initial_conditions:
+    # Where are we getting our initial data from?
+    local: true
+#    parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
+#    parent_folder: /scratch/j/Jonas.Spaeth/autosubmit_scratch/ls-craig/Jonas.Spaeth/$EXPERIMENT_ID$/$PARENT_CHUNK_START_DATE$/
+    sea_surface_temperature_forcing: /project/meteo/w2w/Z2/autoicon/dummy_sst_enso_neutral.nc
diff --git a/namelists/spin-off/icon_atmosphere.namelist b/namelists/spin-off/icon_atmosphere.namelist
new file mode 100644
index 0000000..11d858e
--- /dev/null
+++ b/namelists/spin-off/icon_atmosphere.namelist
@@ -0,0 +1 @@
+# TODO: same as event-generator namelist, but different output? (maybe not needed)
\ No newline at end of file
diff --git a/templates/spin-off/adapt_member_utils.sh b/templates/spin-off/adapt_member_utils.sh
new file mode 100644
index 0000000..e0432d8
--- /dev/null
+++ b/templates/spin-off/adapt_member_utils.sh
@@ -0,0 +1,218 @@
+function interpolate_SST() {
+  local DYNAMICS_GRID_FILE="$1"
+  local SST_INPUT="$2"
+  local SST_OUTPUT="$3"
+
+  remap_namelist="tmp_sst_remap.nmp"
+  cat >${remap_namelist} <<END
+
+  &remap_nml
+  in_filename = "${SST_INPUT}"
+  in_type = 1
+  out_grid_filename = "${DYNAMICS_GRID_FILE}"
+  out_filename = "${SST_OUTPUT}"
+  out_type = 2
+  out_filetype = 4
+  /
+
+  &input_field_nml
+  inputname = "T_SEA"
+  outputname = "T_SEA"
+  code=167
+  /
+
+END
+
+  ulimit -s unlimited
+  OMP_NUM_THREADS=1 iconremap --remap_nml ${remap_namelist} -vvv
+
+  # Remove namelist
+  rm -f ${remap_namelist}
+
+}
+
+# NOTE: this utils file is meant to be sourced, not executed directly.
+
+# Define the function
+function grid_filename_from_extpar() {
+  # Assuming the filename is given as the first argument to the function
+  filename="$1"
+
+  # This extracts the grid identifier from the filename
+  grid_id="${filename:12:13}"
+
+  # This constructs the new filename
+  grid_filename="icon_grid_${grid_id}.nc"
+
+  # This prints out the new filename
+  echo "${grid_filename}"
+}
+
+function interpolate_extpar() {
+
+  local DYNAMICS_GRID_FILE="$1"
+  local EXTERNAL_PARAMETERS_FILE="$2"
+
+  echo "Converting extpar file to ICON grid"
+
+  EXTERNAL_PARAMETERS_GRID_FILE=$(grid_filename_from_extpar ${EXTERNAL_PARAMETERS_FILE})
+
+  if [ "${DYNAMICS_GRID_FILE}" != "${EXTERNAL_PARAMETERS_GRID_FILE}" ]; then
+    cat >remap.nml <<END
+&remap_nml
+in_filename = "${EXTERNAL_PARAMETERS_FILE}"
+in_grid_filename = "${EXTERNAL_PARAMETERS_GRID_FILE}"
+in_type = 2
+out_grid_filename = "${DYNAMICS_GRID_FILE}"
+out_filename = "tmp_extpar.nc"
+out_type = 2
+out_filetype = 4
+/
+END
+
+    cat <<END >extend_remap_namelist.py
+import xarray as xr
+
+with xr.open_dataset("${EXTERNAL_PARAMETERS_FILE}") as ds, open('remap.nml', 'a') as file:
+   for ida, da in enumerate(ds):
+       if da in ["clat","clon","lat","lon"]:
+           continue
+       file.write('&input_field_nml\n')
+       file.write(f'inputname = "{da}"\n')
+       file.write(f'outputname = "{da}"\n')
+       file.write('/\n\n')
+END
+
+    python3 extend_remap_namelist.py
+
+    ulimit -s unlimited
+    OMP_NUM_THREADS=1 iconremap --remap_nml remap.nml #-vvv
+    rm remap.nml
+
+    # Somehow, when we interpolate we lose some attributes.
+    # Here we solve this issue adding the attributes.
+    cat <<END >add_attributes.py
+# Add sst field to analysis file.
+from enstools.io import read
+
+# Open files
+with read("tmp_extpar.nc") as ds, read("${EXTERNAL_PARAMETERS_FILE}") as original_extpar_ds:
+    if "rawdata" in original_extpar_ds.attrs:
+        ds.attrs["rawdata"] = original_extpar_ds.attrs["rawdata"]
+        ds.to_netcdf("extpar.nc")
+END
+    python3 add_attributes.py
+
+    rm "tmp_extpar.nc"
+
+  else
+    ln -sf "${EXTERNAL_PARAMETERS_FILE}" "extpar.nc"
+  fi
+}
+
+function interpolate_initial_conditions() {
+
+  local INPUT_GRID="$1"
+  local IC_FILE="$2"
+  local OUTPUT_GRID="$3"
+  local OUTPUT_FILENAME="$4"
+
+  echo "Interpolating ${IC_FILE} to destination grid"
+
+  cat >remap.nml <<END
+&remap_nml
+in_filename = "${IC_FILE}"
+in_grid_filename = "${INPUT_GRID}"
+in_type = 2
+out_grid_filename = "${OUTPUT_GRID}"
+out_filename = "${OUTPUT_FILENAME}"
+out_type = 2
+out_filetype = 4
+/
+END
+
+  cat <<END >extend_remap_namelist.py
+from enstools.io import read
+
+with read("${IC_FILE}") as ds, open('remap.nml', 'a') as file:
+    all_vars = list(ds.data_vars)
+    remap_vars = []
+    for var in all_vars:
+        if not "bnds" in var and not '_vertices' in var and not 'lat' in var and not 'lon' in var:
+            remap_vars.append(var)
+
+    for var in remap_vars:
+        file.write('&input_field_nml\n')
+        file.write(f'inputname = "{var}"\n')
+        file.write(f'outputname = "{var}"\n')
+        file.write('/\n\n')
+END
+
+  python3 extend_remap_namelist.py
+
+  ulimit -s unlimited
+  OMP_NUM_THREADS=1 iconremap --remap_nml remap.nml #-vvv
+  rm remap.nml
+}
+
+function integrate_sst_to_analysis() {
+  local INTERPOLATED_SST="$1"
+  local ANALYSIS_FILE="interpolated_analysis.nc"
+
+  cat <<END >integrate_sst_to_analysis.py
+# Add sst field to analysis file.
+from enstools.io import read
+import os
+import numpy as np
+
+os.environ['ECCODES_GRIB_NO_INDEX'] = '1'
+# Open files
+with read("${ANALYSIS_FILE}") as ds, read("${INTERPOLATED_SST}") as sst_ds:
+  date=ds.time.values
+  day_of_year = int((date - date.astype('datetime64[Y]')) / np.timedelta64(1, 'D'))
+  # Replace analysis T_SEA with info from the sst_clim file.
+  ds["T_SEA"]= sst_ds["T_SEA"].isel(time=day_of_year+1)
+  ds.to_netcdf("analysis.nc")
+END
+
+  python integrate_sst_to_analysis.py
+
+}
+
+function integrate_sst_to_extpar() {
+  local INTERPOLATED_SST="$1"
+  local EXTERNAL_PARAMETERS_FILE="$2"
+
+  cat <<END >integrate_sst_to_extpar.py
+# Add sst field to analysis file.
+from enstools.io import read
+import os
+import numpy as np
+from datetime import datetime, timedelta
+import xarray as xr
+
+os.environ['ECCODES_GRIB_NO_INDEX'] = '1'
+# Open files
+with read("${EXTERNAL_PARAMETERS_FILE}") as ds, read("${INTERPOLATED_SST}") as sst_ds:
+  da = sst_ds["T_SEA"]
+  year = 2000 # or any other year
+  dates = [(datetime(year, 1, 1) + timedelta(days=idx)).month for idx, _ in enumerate(da['time'].values)]
+
+  da['time'] = dates
+
+  # Replace analysis T_SEA with monthly mean from the sst_ds.
+  monthly_means = [da.sel(time=month).mean(dim="time") for month in range(1,13)]
+  concat_da = xr.concat(monthly_means, dim='time')
+  concat_da['time'] = np.arange(1, 13) # replace 'time' with month numbers
+  concat_da = concat_da.drop(['clon', 'clat'])
+
+  # Replace analysis T_SEA with monthly mean from the sst_ds.
+  ds["T_SEA"] = concat_da
+
+  ds.to_netcdf("extpar.nc")
+
+END
+
+  python integrate_sst_to_extpar.py
+
+}
diff --git a/templates/spin-off/adapt_parent_member.sh b/templates/spin-off/adapt_parent_member.sh
new file mode 100644
index 0000000..9ed0d33
--- /dev/null
+++ b/templates/spin-off/adapt_parent_member.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+#TODO:
+# - Interpolate SST forcing file from regular grid to the destination grid
+# - Integrate the SST into the analysis file
+# - Add SST climatology in the external parameter file.
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+PROJ_FOLDER=${WORKDIR}/production_project
+
+SST_FORCING=$( basename %simulation.initial_conditions.sea_surface_temperature_forcing% )
+INTERPOLATED_SST=sst_climatology.nc
+DESTINATION_GRID=%simulation.dynamics_grid_filename%
+EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
+STARTDATE=%SDATE%
+MEMBER=%MEMBER%
+
+
+# Member folder
+MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+cd ${MEMBER_DIR} || exit
+
+
+# Get analysis file path
+ANALYSIS_FILE=$(find . -name "igaf*.m*.grb" -print -quit)
+FG_FILE=$(find . -name "igfff00030000.m*.grb" -print -quit)
+
+
+# Load spack
+. ${WORKDIR}/production_project/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%" "%spack.upstreams%"
+
+# Load dwd-icon-tools
+spack load --first dwd-icon-tools % "%spack.compiler%"
+
+# Set environment variable for eccodes-dwd definitions:
+source ${WORKDIR}/eccodes_defs.env
+
+# Activate virtual environment
+source ${WORKDIR}/python_environment/bin/activate
+
+# Get the functions to interpolate and adapt the input files
+source ${PROJ_FOLDER}/templates/spin-off/adapt_member_utils.sh
+
+# Interpolate SST
+interpolate_SST "${DESTINATION_GRID}" "${SST_FORCING}" "${INTERPOLATED_SST}"
+
+
+# Interpolate extpar
+#interpolate_extpar "${DESTINATION_GRID}" "${EXTERNAL_PARAMETERS_FILE}"
+
+
+# Integrate sst to extpar
+integrate_sst_to_extpar "${INTERPOLATED_SST}" "${EXTERNAL_PARAMETERS_FILE}"
+
+
+# Interpolate analysis
+interpolate_initial_conditions "icon_grid_0024_R02B06_G.nc" "${ANALYSIS_FILE}" "${DESTINATION_GRID}" "interpolated_analysis.nc"
+
+# Interpolate first guess
+interpolate_initial_conditions "icon_grid_0024_R02B06_G.nc" "${FG_FILE}" "${DESTINATION_GRID}" "first_guess.nc"
+
+
+integrate_sst_to_analysis "${INTERPOLATED_SST}"
diff --git a/templates/spin-off/adapt_spinoff_member.sh b/templates/spin-off/adapt_spinoff_member.sh
new file mode 100644
index 0000000..9ed0d33
--- /dev/null
+++ b/templates/spin-off/adapt_spinoff_member.sh
@@ -0,0 +1,66 @@
+#!/bin/bash
+
+#TODO:
+# - Interpolate SST forcing file from regular grid to the destination grid
+# - Integrate the SST into the analysis file
+# - Add SST climatology in the external parameter file.
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+PROJ_FOLDER=${WORKDIR}/production_project
+
+SST_FORCING=$( basename %simulation.initial_conditions.sea_surface_temperature_forcing% )
+INTERPOLATED_SST=sst_climatology.nc
+DESTINATION_GRID=%simulation.dynamics_grid_filename%
+EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
+STARTDATE=%SDATE%
+MEMBER=%MEMBER%
+
+
+# Member folder
+MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+cd ${MEMBER_DIR} || exit
+
+
+# Get analysis file path
+ANALYSIS_FILE=$(find . -name "igaf*.m*.grb" -print -quit)
+FG_FILE=$(find . -name "igfff00030000.m*.grb" -print -quit)
+
+
+# Load spack
+. ${WORKDIR}/production_project/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%" "%spack.upstreams%"
+
+# Load dwd-icon-tools
+spack load --first dwd-icon-tools % "%spack.compiler%"
+
+# Set environment variable for eccodes-dwd definitions:
+source ${WORKDIR}/eccodes_defs.env
+
+# Activate virtual environment
+source ${WORKDIR}/python_environment/bin/activate
+
+# Get the functions to interpolate and adapt the input files
+source ${PROJ_FOLDER}/templates/spin-off/adapt_member_utils.sh
+
+# Interpolate SST
+interpolate_SST "${DESTINATION_GRID}" "${SST_FORCING}" "${INTERPOLATED_SST}"
+
+
+# Interpolate extpar
+#interpolate_extpar "${DESTINATION_GRID}" "${EXTERNAL_PARAMETERS_FILE}"
+
+
+# Integrate sst to extpar
+integrate_sst_to_extpar "${INTERPOLATED_SST}" "${EXTERNAL_PARAMETERS_FILE}"
+
+
+# Interpolate analysis
+interpolate_initial_conditions "icon_grid_0024_R02B06_G.nc" "${ANALYSIS_FILE}" "${DESTINATION_GRID}" "interpolated_analysis.nc"
+
+# Interpolate first guess
+interpolate_initial_conditions "icon_grid_0024_R02B06_G.nc" "${FG_FILE}" "${DESTINATION_GRID}" "first_guess.nc"
+
+
+integrate_sst_to_analysis "${INTERPOLATED_SST}"
diff --git a/templates/spin-off/prepare_parent_date.sh b/templates/spin-off/prepare_parent_date.sh
new file mode 100644
index 0000000..17cec5a
--- /dev/null
+++ b/templates/spin-off/prepare_parent_date.sh
@@ -0,0 +1,76 @@
+#!/bin/bash -l
+
+# This script is executed on the machine at which autosubmit is executed.
+
+# Because both the initial conditions and the execution can be located on the local
+# system, we need to define these two variables:
+DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
+
+if [ "x%HPCARCH%" == "xlocal" ]; then
+  RUN_MACHINE_IS_LOCAL="True"
+else
+  RUN_MACHINE_IS_LOCAL="False"
+fi
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+HPCUSER=%HPCUSER%
+HPCHOST=%HPCHOST%
+
+# Define date directory, create it and go there
+COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
+INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
+# TODO: this file has to be determined manually, pointing to the last available restart file
+#INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}
+
+#AN_FILES=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*.m*.grb" | sort -n )
+#FG_FILES=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m*.grb" | sort -n )
+RS_FILES=$(find ${INITIAL_CONDITIONS_PATH} -name "*restart*" | sort -n )  # NEW
+
+# Find files
+#if [ -z "${AN_FILES}" ]; then
+#  echo "Analysis files for date ${STARTDATE} not found!"
+#  exit 1
+#fi
+#
+#if [ -z "${FG_FILES}" ]; then
+#  echo "FG files for date ${STARTDATE} not found!"
+#  exit 1
+#fi
+if [ -z "${RS_FILES}" ]; then
+  echo "Restart files for date ${STARTDATE} not found!"
+  exit 1
+fi
+
+# Check if we copy the initial conditions from the local system or the remote one
+if [ "${RUN_MACHINE_IS_LOCAL}" != "True" ]; then
+  # Create member folder
+  ssh "${HPCUSER}@${HPCHOST}" mkdir -p ${COMMON_DATE_FOLDER}
+
+  # Transfer analysis and first-guess files
+#  rsync -v ${AN_FILES} "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/"
+#  rsync -v ${FG_FILES} "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/"
+  rsync -v ${RS_FILES} "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/"
+
+  # Change permissions to read only.
+#  ssh "${HPCUSER}@${HPCHOST}" chmod 440 "${COMMON_DATE_FOLDER}/*"
+else
+  # Create member folder and go there
+  mkdir -p ${COMMON_DATE_FOLDER}
+  cd ${COMMON_DATE_FOLDER} || exit
+
+
+
+  # Copy the first-guess and analysis files.
+  # Transfer analysis and first-guess files
+#  rsync -v ${AN_FILES} "${COMMON_DATE_FOLDER}/"
+#  rsync -v ${FG_FILES} "${COMMON_DATE_FOLDER}/"
+  rsync -v ${RS_FILES} "${COMMON_DATE_FOLDER}/"
+
+  # Change permissions to read only.
+  chmod 440 ./*
+fi
\ No newline at end of file
diff --git a/templates/spin-off/prepare_parent_experiment.sh b/templates/spin-off/prepare_parent_experiment.sh
new file mode 100644
index 0000000..5fc9025
--- /dev/null
+++ b/templates/spin-off/prepare_parent_experiment.sh
@@ -0,0 +1,106 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
+RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
+EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
+SST_FORCING=%simulation.initial_conditions.sea_surface_temperature_forcing%
+
+# Activate spack
+. ${WORKDIR}/production_project/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%" "%spack.upstreams%"
+
+# Load icon module needed to retrieve some data
+spack load --first icon-nwp@%ICON_VERSION%
+
+# Create a folder for the common inidata and go there
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+mkdir -p "${COMMON_INIDATA_FOLDER}"
+cd "${COMMON_INIDATA_FOLDER}" || exit
+
+# Change permissions
+#if [ "$(ls -A .)" ]; then
+#  chmod 660 ./*
+#fi
+
+# Download or copy required input files
+function download_file() {
+  URL=$1
+  FILE=${2:-$(basename "$URL")}
+  if [ ! -e "$FILE" ]; then
+    echo "Download $URL => $FILE"
+    wget -q "$URL" -O "$FILE"
+  fi
+}
+
+function check_url() {
+  # Check if the url exists
+  local url="$1"
+  status=$(
+    (curl -s --head $url | head -n 1 | grep  "HTTP/1.[01] [23].." >/dev/null)
+    echo $?
+  )
+  if [ $status -eq 0 ]; then
+    echo "true"
+  else
+    echo "false"
+  fi
+}
+
+function get_grfinfo_file(){
+  local filename="$1"
+  new_suffix="-grfinfo.nc"
+  new_filename="$(basename "$filename" .nc)$new_suffix"
+  echo "$new_filename"
+}
+
+# Download grid files and external parameters
+BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
+download_file $BASEURL/${DYNAMICS_GRID_FILENAME}
+download_file $BASEURL/${RADIATION_GRID_FILE}
+download_file $BASEURL/${EXTERNAL_PARAMETERS_FILE}
+
+if [[ ${DYNAMICS_GRID_FILENAME} != "icon_grid_0024_R02B06_G.nc" ]] ; then
+  download_file $BASEURL/"icon_grid_0024_R02B06_G.nc"
+fi
+
+# In case the grfinfo files exist, we download them as well
+RADIATION_GRID_INFO_FILE=$( get_grfinfo_file ${RADIATION_GRID_FILE} )
+DYNAMICS_GRID_INFO_FILE=$( get_grfinfo_file ${DYNAMICS_GRID_FILENAME} )
+
+if [ $( check_url "$BASEURL/${RADIATION_GRID_INFO_FILE}" ) == "true" ]; then
+  download_file "$BASEURL/${RADIATION_GRID_INFO_FILE}"
+fi
+
+if [ $( check_url "$BASEURL/${DYNAMICS_GRID_INFO_FILE}" ) == "true" ]; then
+  download_file "$BASEURL/${DYNAMICS_GRID_INFO_FILE}"
+fi
+
+
+# Define the function
+function grid_filename_from_extpar() {
+    local filename="$1"
+    # Extract the grid identifier from the filename
+    grid_id="${filename:12:13}"
+    grid_filename="icon_grid_${grid_id}.nc"
+    echo "${grid_filename}"
+}
+
+# If the external parameters file is in a different grid, also download this one.
+EXTERNAL_PARAMETERS_GRID_FILE=$( grid_filename_from_extpar ${EXTERNAL_PARAMETERS_FILE} )
+if [ "${EXTERNAL_PARAMETERS_GRID_FILE}" != "${DYNAMICS_GRID_FILENAME}" ]; then
+  download_file "$BASEURL/${EXTERNAL_PARAMETERS_GRID_FILE}"
+fi
+
+
+# Link input for radiation
+ln -sf "${ICON_DATA_PATH}/rrtmg_lw.nc" .
+ln -sf "${ICON_DATA_PATH}/ECHAM6_CldOptProps.nc" .
+ln -sf "${ICON_BASE_PATH}/run/ana_varnames_map_file.txt" .
+
+# Copy sst forcing
+cp "${SST_FORCING}" .
+
+# Change permissions to read only.
+#chmod 440 ./*
\ No newline at end of file
diff --git a/templates/spin-off/prepare_parent_member.sh b/templates/spin-off/prepare_parent_member.sh
new file mode 100644
index 0000000..bdf5027
--- /dev/null
+++ b/templates/spin-off/prepare_parent_member.sh
@@ -0,0 +1,41 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+MEMBER=%MEMBER%
+
+# Common folder with data needed for all simulations
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+# Common folder for the same start date
+COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+# Member folder
+MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+# Create member folder and go there
+mkdir -p ${MEMBER_DIR}
+
+cd ${MEMBER_DIR} || exit
+
+
+# Link all files from the common inidata folder and the common date folder
+ln -sf ${COMMON_INIDATA_FOLDER}/* .
+
+# Format the member number
+get_member_number() {
+  local var=$1
+  if [[ $var =~ [^0-9]*([0-9]+) ]]; then
+    printf "%03d" ${BASH_REMATCH[1]}
+  fi
+}
+
+
+member_num=$(get_member_number ${MEMBER})
+
+# Link analysis
+analysis_file=$(find ${COMMON_DATE_FOLDER} -name "igaf*.m${member_num}.grb" -type f -print -quit)
+ln -sf "$analysis_file" .
+
+# Link first guess
+ln -sf ${COMMON_DATE_FOLDER}/igfff00030000.m${member_num}.grb .
diff --git a/templates/spin-off/prepare_parent_namelist.py b/templates/spin-off/prepare_parent_namelist.py
new file mode 100644
index 0000000..35888d5
--- /dev/null
+++ b/templates/spin-off/prepare_parent_namelist.py
@@ -0,0 +1,174 @@
+import logging
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import f90nml
+import yaml
+
+logger = logging.getLogger("prepare_chunk")
+logger.setLevel(logging.INFO)
+
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+MEMBER = "%MEMBER%"
+CHUNK = "%CHUNK%"
+# Get run directory
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/{MEMBER}")
+ATMOSPHERE_NAMELIST_PATH = Path("%simulation.namelist_paths.atmosphere%")
+MASTER_NAMELIST_PATH = Path("%simulation.namelist_paths.master%")
+
+# Get first-guess and analysis names:
+# analysis_filename = (RUNDIR / "igaf*.m*.grb")
+# analysis_filename = analysis_filename.parent.glob(analysis_filename.name).__next__()
+# analysis_filename = analysis_filename.name
+analysis_filename = "analysis.nc"
+
+
+# first_guess_filename = (RUNDIR / "igfff00030000.m*.grb").resolve()
+# first_guess_filename = first_guess_filename.parent.glob(first_guess_filename.name).__next__()
+# first_guess_filename = first_guess_filename.name
+first_guess_filename = "first_guess.nc"
+
+
+# Example of date format "2018-06-01T00:00:00Z"
+date_format = "%simulation.date_format%"
+
+START_YEAR = "%Chunk_START_YEAR%"
+START_MONTH = "%Chunk_START_MONTH%"
+START_DAY = "%Chunk_START_DAY%"
+START_HOUR = "%Chunk_START_HOUR%"
+
+END_YEAR = "%Chunk_END_YEAR%"
+END_MONTH = "%Chunk_END_MONTH%"
+END_DAY = "%Chunk_END_DAY%"
+END_HOUR = "%Chunk_END_HOUR%"
+
+Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
+Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))
+
+# Read custom namelist parameters from configuration
+atmosphere_namelist_string = """
+%atmosphere_namelist%
+"""
+
+master_namelist_string = """
+%master_namelist%
+"""
+
+# Compute difference in seconds
+checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())
+
+# TODO: Is that really necessary?
+# Add 10 minutes to allow the model to write the restarts
+Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)
+
+atmosphere_namelist_replacements = {
+    "time_nml": {
+        "dt_restart": checkpoint_time
+    },
+    "io_nml": {
+        "dt_checkpoint": checkpoint_time
+    },
+
+    "grid_nml": {
+        "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
+        "radiation_grid_filename": "%simulation.radiation_grid_filename%",
+    },
+
+    "extpar_nml": {
+        # "extpar_filename": "%simulation.external_parameters_filename%",
+        "extpar_filename": "extpar.nc",
+    },
+
+    "initicon_nml": {
+        "dwdfg_filename": first_guess_filename,
+        "dwdana_filename": analysis_filename,
+    }
+}
+
+master_namelist_replacements = {
+    "master_nml": {
+        "lrestart": False if "%CHUNK%" == "1" else True,
+    },
+    "master_time_control_nml": {
+        "experimentStartDate": Chunk_START_DATE.strftime(date_format),
+        "experimentStopDate": Chunk_END_DATE.strftime(date_format),
+    }
+}
+
+
+def read_namelist(namelist_string: str) -> dict:
+    """
+    Function to read the custom namelist specifications provided in the configuration files.
+    It accepts both yaml and f90nml format.
+    :param namelist_string:
+    :return:
+    """
+    parameters = yaml.safe_load(namelist_string)
+    if isinstance(parameters, str):
+        parameters = f90nml.reads(nml_string=namelist_string).todict()
+    return parameters
+
+
+def patch_output_entries(namelist: f90nml.Namelist) -> f90nml.Namelist:
+    output_entries = [entry for entry in namelist["output_nml"]]
+    for entry in output_entries:
+        for key in entry:
+            if entry[key] == "#OUTPUT_START#":
+                entry[key] = Chunk_START_DATE.strftime(date_format)
+            elif entry[key] == "#OUTPUT_END#":
+                entry[key] = Chunk_END_DATE.strftime(date_format)
+
+    return namelist
+
+
+def main():
+    """
+    Main function that processes both atmosphere and master namelists and adds the necessary patches
+    :return:
+    """
+    # Process atmosphere namelist
+    atmosphere_namelist = f90nml.read(ATMOSPHERE_NAMELIST_PATH.as_posix())
+    # Convert output_nml to a co-group.
+    atmosphere_namelist.create_cogroup("output_nml")
+    print("Original atmosphere namelist:")
+    print(atmosphere_namelist)
+    atmosphere_namelist.patch(atmosphere_namelist_replacements)
+
+    # Read custom namelist parameters from configuration file
+    atmosphere_custom_namelist = read_namelist(atmosphere_namelist_string)
+
+    if atmosphere_custom_namelist is not None:
+        try:
+            atmosphere_namelist.patch(atmosphere_custom_namelist)
+        except AttributeError:
+            raise AssertionError("Problem applying the namelist patch! Probably related with the output section.")
+
+    # Patch output entries:
+    atmosphere_namelist = patch_output_entries(atmosphere_namelist)
+
+    print("Patched atmosphere namelist:")
+    print(atmosphere_namelist)
+
+    atmosphere_output_namelist = (RUNDIR / "icon_atmosphere.namelist")
+    f90nml.write(nml=atmosphere_namelist, nml_path=atmosphere_output_namelist.as_posix(), force=True)
+
+    master_namelist = f90nml.read(MASTER_NAMELIST_PATH.as_posix())
+    print("Original master namelist:")
+    print(master_namelist)
+    # Read custom namelist parameters from configuration file
+    master_custom_namelist = read_namelist(master_namelist_string)
+    # Process atmosphere namelist
+    master_namelist.patch(master_namelist_replacements)
+    if master_custom_namelist is not None:
+        master_namelist.patch(master_custom_namelist)
+    print("Patched master namelist:")
+    print(master_namelist)
+    master_output_namelist = (RUNDIR / "icon_master.namelist")
+    f90nml.write(nml=master_namelist, nml_path=master_output_namelist.as_posix(), force=True)
+
+
+if __name__ == '__main__':
+    main()
+
diff --git a/templates/spin-off/prepare_parent_runtime.py b/templates/spin-off/prepare_parent_runtime.py
new file mode 100644
index 0000000..d355a42
--- /dev/null
+++ b/templates/spin-off/prepare_parent_runtime.py
@@ -0,0 +1,46 @@
+"""
A spin-off experiment may start on a day for which no event-generator restart file is available.
+This script finds the last available restart file in the event-generator directory.
+From this date, the event-generator member can be restarted.
+"""
+from glob import glob
+from dateutil.parser import parse
+from datetime import datetime, timedelta
+
# Absolute path of the parent ("mother") ensemble-member run directory to scan.
DIR_MOTHER_RUN = '/scratch/p/Philip.Rupp/experiments/2chunks/20201001/m1'
# Start date (YYYY-MM-DD) of the spin-off experiment.
SPIN_OFF_START = '2020-10-20'
# Never restart from a file older than this many days before SPIN_OFF_START.
NOT_MORE_THAN_N_DAYS_BACK = 30
+
+
def infer_restart_dates_from_files_in_direcory(directory):
    """Return the datetimes parsed from restart-file names found in *directory*.

    Scans for ``*restart*.nc`` files and fuzzily parses a timestamp out of the
    part of each filename that follows the last ``atm`` marker (timezone
    information, if any, is discarded).
    """
    matching_files = glob(directory + '/*restart*.nc')

    inferred_dates = []
    for filename in matching_files:
        # Keep only the text after the "atm" marker, then let dateutil
        # fish the timestamp out of whatever surrounds it.
        tail = filename.split("atm")[-1]
        inferred_dates.append(parse(tail, fuzzy=True, ignoretz=True))
    return inferred_dates
+
+
def find_date_before(target_date, dates_list, max_lag=None):
    """Return the latest date in *dates_list* strictly before *target_date*.

    :param target_date: a datetime, or a 'YYYY-MM-DD' string that is parsed first
    :param dates_list: candidate datetimes to choose from
    :param max_lag: if given, candidates more than *max_lag* days before
        *target_date* are ignored
    :return: the most recent eligible datetime, or None when none qualifies
    """
    # Accept a plain date string as well as a datetime object.
    if not isinstance(target_date, datetime):
        target_date = datetime.strptime(target_date, '%Y-%m-%d')  # Adjust the format if needed

    # Earliest date still allowed; without a lag limit, allow everything.
    earliest_allowed = datetime.min if max_lag is None else target_date - timedelta(days=max_lag)

    # Keep only candidates inside the [earliest_allowed, target_date) window,
    # then take the latest one; default=None covers the empty case.
    eligible = [candidate for candidate in dates_list
                if earliest_allowed <= candidate < target_date]
    return max(eligible, default=None)
+
+
+if __name__ == "__main__":
+    restart_file_dates = infer_restart_dates_from_files_in_direcory(DIR_MOTHER_RUN)
+    restart_from = find_date_before(SPIN_OFF_START, restart_file_dates, max_lag=NOT_MORE_THAN_N_DAYS_BACK)
+    print(restart_from)
diff --git a/templates/spin-off/prepare_spinoff_date.sh b/templates/spin-off/prepare_spinoff_date.sh
new file mode 100644
index 0000000..c1f76e2
--- /dev/null
+++ b/templates/spin-off/prepare_spinoff_date.sh
@@ -0,0 +1,68 @@
#!/bin/bash -l

# Stage the initial-condition files (analysis + first guess) for one start
# date of the spin-off experiment, either locally or on the remote HPC.
# This script is executed on the machine at which autosubmit is executed.

# The initial conditions and the execution may both live on the local system,
# so we track both cases with these two variables.
# NOTE(review): DATA_IS_LOCAL is assigned but never used below -- confirm
# whether it can be dropped or was meant to guard the transfer.
DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%

if [ "x%HPCARCH%" == "xlocal" ]; then
  RUN_MACHINE_IS_LOCAL="True"
else
  RUN_MACHINE_IS_LOCAL="False"
fi

# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
STARTDATE=%SDATE%
HPCUSER=%HPCUSER%
HPCHOST=%HPCHOST%

# Define date directory, create it and go there
COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata

# Member number of the initial-condition source, zero-padded to three digits.
# NOTE(review): AN_MEMBER is not referenced below (the find patterns use m*).
AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
# Source layout: <parent>/<YYYYMM>/<YYYYMMDD>T00
INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00

# Analysis files (igaf*) and +3h first-guess files (igfff00030000*), all members.
AN_FILES=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*.m*.grb" | sort -n )
FG_FILES=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m*.grb" | sort -n )

# Abort early when either file set is missing for this date.
if [ -z "${AN_FILES}" ]; then
  echo "Analysis files for date ${STARTDATE} not found!"
  exit 1
fi

if [ -z "${FG_FILES}" ]; then
  echo "FG files for date ${STARTDATE} not found!"
  exit 1
fi


# Check if we copy the initial conditions from the local system or the remote one
if [ "${RUN_MACHINE_IS_LOCAL}" != "True" ]; then
  # Create member folder
  ssh "${HPCUSER}@${HPCHOST}" mkdir -p ${COMMON_DATE_FOLDER}

  # Transfer analysis and first-guess files
  # (AN_FILES/FG_FILES are intentionally unquoted: word splitting expands
  # the newline-separated find output into individual rsync arguments)
  rsync -v ${AN_FILES} "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/"
  rsync -v ${FG_FILES} "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/"

  # Change permissions to read only.
#  ssh "${HPCUSER}@${HPCHOST}" chmod 440 "${COMMON_DATE_FOLDER}/*"
else
  # Create member folder and go there
  mkdir -p ${COMMON_DATE_FOLDER}
  cd ${COMMON_DATE_FOLDER} || exit



  # Copy the first-guess and analysis files.
  # Transfer analysis and first-guess files
  rsync -v ${AN_FILES} "${COMMON_DATE_FOLDER}/"
  rsync -v ${FG_FILES} "${COMMON_DATE_FOLDER}/"

  # Change permissions to read only.
  chmod 440 ./*
fi
\ No newline at end of file
diff --git a/templates/spin-off/prepare_spinoff_experiment.sh b/templates/spin-off/prepare_spinoff_experiment.sh
new file mode 100644
index 0000000..5fc9025
--- /dev/null
+++ b/templates/spin-off/prepare_spinoff_experiment.sh
@@ -0,0 +1,106 @@
#!/bin/bash -l

# Prepare the experiment-wide static input (grids, external parameters,
# radiation data, SST forcing) in the shared inidata folder.

# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
SST_FORCING=%simulation.initial_conditions.sea_surface_temperature_forcing%

# Activate spack
. ${WORKDIR}/production_project/platforms/common/spack_utils.sh
load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%" "%spack.upstreams%"

# Load icon module needed to retrieve some data
spack load --first icon-nwp@%ICON_VERSION%

# Create a folder for the common inidata and go there
COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
mkdir -p "${COMMON_INIDATA_FOLDER}"
cd "${COMMON_INIDATA_FOLDER}" || exit

# Change permissions
#if [ "$(ls -A .)" ]; then
#  chmod 660 ./*
#fi
# Download or copy required input files.
# download_file URL [FILE]
#   Download URL to FILE (defaults to the URL's basename) unless FILE already
#   exists. On a failed download the partial/empty file is removed, so a later
#   rerun retries instead of being fooled by the leftover into skipping it.
function download_file() {
  URL=$1
  FILE=${2:-$(basename "$URL")}
  if [ ! -e "$FILE" ]; then
    echo "Download $URL => $FILE"
    if ! wget -q "$URL" -O "$FILE"; then
      # wget -O creates the output file even on failure; clean it up so the
      # existence check above does not permanently mask the failed download.
      rm -f "$FILE"
      echo "Download of $URL failed!" >&2
      return 1
    fi
  fi
}
+
# check_url URL
#   Print "true" if a HEAD request against URL answers with an HTTP 2xx/3xx
#   status line, "false" otherwise (including unreachable hosts).
#   The pattern accepts any protocol version: the previous "HTTP/1.[01]"
#   pattern never matched "HTTP/2 <code>" status lines, so every HTTP/2
#   server was reported as "false".
function check_url() {
  local url="$1"
  if curl -s --head "$url" | head -n 1 | grep -E "HTTP/[0-9.]+ [23][0-9][0-9]" >/dev/null; then
    echo "true"
  else
    echo "false"
  fi
}
+
# get_grfinfo_file GRID_FILE
#   Map a grid file name such as "icon_grid_0024_R02B06_G.nc" to the name of
#   its companion info file, "icon_grid_0024_R02B06_G-grfinfo.nc".
function get_grfinfo_file(){
  local grid_file="$1"
  echo "$(basename "$grid_file" .nc)-grfinfo.nc"
}
+
# Download grid files and external parameters
BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
download_file $BASEURL/${DYNAMICS_GRID_FILENAME}
download_file $BASEURL/${RADIATION_GRID_FILE}
download_file $BASEURL/${EXTERNAL_PARAMETERS_FILE}

# The R02B06 global grid is fetched in addition to the configured dynamics
# grid. NOTE(review): presumably required as a reference grid elsewhere in
# the workflow -- confirm.
if [[ ${DYNAMICS_GRID_FILENAME} != "icon_grid_0024_R02B06_G.nc" ]] ; then
  download_file $BASEURL/"icon_grid_0024_R02B06_G.nc"
fi

# In case the grfinfo files exist, we download them as well
RADIATION_GRID_INFO_FILE=$( get_grfinfo_file ${RADIATION_GRID_FILE} )
DYNAMICS_GRID_INFO_FILE=$( get_grfinfo_file ${DYNAMICS_GRID_FILENAME} )

# Only fetch the info files when the server actually serves them.
if [ $( check_url "$BASEURL/${RADIATION_GRID_INFO_FILE}" ) == "true" ]; then
  download_file "$BASEURL/${RADIATION_GRID_INFO_FILE}"
fi

if [ $( check_url "$BASEURL/${DYNAMICS_GRID_INFO_FILE}" ) == "true" ]; then
  download_file "$BASEURL/${DYNAMICS_GRID_INFO_FILE}"
fi
+
# grid_filename_from_extpar EXTPAR_FILE
#   Derive the grid file belonging to an extpar file: characters 12..24 of
#   the extpar name carry the grid identifier, e.g. "0024_R02B06_G" in
#   "icon_extpar_0024_R02B06_G_...", which maps to "icon_grid_0024_R02B06_G.nc".
function grid_filename_from_extpar() {
    local extpar_name="$1"
    local grid_id="${extpar_name:12:13}"
    echo "icon_grid_${grid_id}.nc"
}
+
# If the external parameters file is in a different grid, also download this one.
EXTERNAL_PARAMETERS_GRID_FILE=$( grid_filename_from_extpar ${EXTERNAL_PARAMETERS_FILE} )
if [ "${EXTERNAL_PARAMETERS_GRID_FILE}" != "${DYNAMICS_GRID_FILENAME}" ]; then
  download_file "$BASEURL/${EXTERNAL_PARAMETERS_GRID_FILE}"
fi


# Link input for radiation
# NOTE(review): ICON_DATA_PATH / ICON_BASE_PATH are expected to be exported
# by the icon-nwp module loaded above -- confirm.
ln -sf "${ICON_DATA_PATH}/rrtmg_lw.nc" .
ln -sf "${ICON_DATA_PATH}/ECHAM6_CldOptProps.nc" .
ln -sf "${ICON_BASE_PATH}/run/ana_varnames_map_file.txt" .

# Copy sst forcing
cp "${SST_FORCING}" .

# Change permissions to read only.
#chmod 440 ./*
\ No newline at end of file
diff --git a/templates/spin-off/prepare_spinoff_member.sh b/templates/spin-off/prepare_spinoff_member.sh
new file mode 100644
index 0000000..bdf5027
--- /dev/null
+++ b/templates/spin-off/prepare_spinoff_member.sh
@@ -0,0 +1,41 @@
#!/bin/bash -l

# Prepare the run directory of a single ensemble member: link the shared
# static input plus the member-specific analysis / first-guess files.

# Get some variables provided by autosubmit.
WORKDIR=%HPCROOTDIR%
STARTDATE=%SDATE%
MEMBER=%MEMBER%

# Common folder with data needed for all simulations
COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
# Common folder for the same start date
COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata

# Member folder
MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}

# Create member folder and go there
mkdir -p ${MEMBER_DIR}

cd ${MEMBER_DIR} || exit


# Link all files from the common inidata folder and the common date folder
ln -sf ${COMMON_INIDATA_FOLDER}/* .
+
# get_member_number LABEL
#   Extract the numeric part of a member label (e.g. "fc3" -> "003") and
#   print it zero-padded to three digits. Prints nothing when LABEL
#   contains no digits.
get_member_number() {
  local label=$1
  [[ $label =~ [^0-9]*([0-9]+) ]] && printf "%03d" ${BASH_REMATCH[1]}
}
+
+
member_num=$(get_member_number ${MEMBER})

# Link analysis
# (-print -quit keeps only the first match for this member number)
analysis_file=$(find ${COMMON_DATE_FOLDER} -name "igaf*.m${member_num}.grb" -type f -print -quit)
ln -sf "$analysis_file" .

# Link first guess
ln -sf ${COMMON_DATE_FOLDER}/igfff00030000.m${member_num}.grb .
diff --git a/templates/spin-off/prepare_spinoff_namelist.py b/templates/spin-off/prepare_spinoff_namelist.py
new file mode 100644
index 0000000..35888d5
--- /dev/null
+++ b/templates/spin-off/prepare_spinoff_namelist.py
@@ -0,0 +1,174 @@
+import logging
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import f90nml
+import yaml
+
# Module-level logger for this template script.
logger = logging.getLogger("prepare_chunk")
logger.setLevel(logging.INFO)

# Get some autosubmit variables
# NOTE: every "%...%" token in this file is a placeholder that autosubmit
# substitutes with experiment-configuration values before the script runs.
WORKDIR = "%HPCROOTDIR%"
STARTDATE = "%SDATE%"
MEMBER = "%MEMBER%"
CHUNK = "%CHUNK%"
# Get run directory
RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/{MEMBER}")
# Namelist template files that main() reads and patches.
ATMOSPHERE_NAMELIST_PATH = Path("%simulation.namelist_paths.atmosphere%")
MASTER_NAMELIST_PATH = Path("%simulation.namelist_paths.master%")

# Get first-guess and analysis names:
# analysis_filename = (RUNDIR / "igaf*.m*.grb")
# analysis_filename = analysis_filename.parent.glob(analysis_filename.name).__next__()
# analysis_filename = analysis_filename.name
analysis_filename = "analysis.nc"


# first_guess_filename = (RUNDIR / "igfff00030000.m*.grb").resolve()
# first_guess_filename = first_guess_filename.parent.glob(first_guess_filename.name).__next__()
# first_guess_filename = first_guess_filename.name
first_guess_filename = "first_guess.nc"


# Example of date format "2018-06-01T00:00:00Z"
date_format = "%simulation.date_format%"

# Chunk boundary components supplied by autosubmit (all strings).
START_YEAR = "%Chunk_START_YEAR%"
START_MONTH = "%Chunk_START_MONTH%"
START_DAY = "%Chunk_START_DAY%"
START_HOUR = "%Chunk_START_HOUR%"

END_YEAR = "%Chunk_END_YEAR%"
END_MONTH = "%Chunk_END_MONTH%"
END_DAY = "%Chunk_END_DAY%"
END_HOUR = "%Chunk_END_HOUR%"

# Chunk boundaries as datetime objects.
Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))

# Read custom namelist parameters from configuration
atmosphere_namelist_string = """
%atmosphere_namelist%
"""

master_namelist_string = """
%master_namelist%
"""

# Compute difference in seconds
checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())

# TODO: Is that really necessary?
# Add 10 minutes to allow the model to write the restarts
Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)

# Replacements applied to the atmosphere namelist template in main().
atmosphere_namelist_replacements = {
    "time_nml": {
        "dt_restart": checkpoint_time
    },
    "io_nml": {
        "dt_checkpoint": checkpoint_time
    },

    "grid_nml": {
        "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
        "radiation_grid_filename": "%simulation.radiation_grid_filename%",
    },

    "extpar_nml": {
        # "extpar_filename": "%simulation.external_parameters_filename%",
        "extpar_filename": "extpar.nc",
    },

    "initicon_nml": {
        "dwdfg_filename": first_guess_filename,
        "dwdana_filename": analysis_filename,
    }
}

# Replacements applied to the master namelist template in main().
master_namelist_replacements = {
    "master_nml": {
        # Restart only from the second chunk onwards.
        "lrestart": False if "%CHUNK%" == "1" else True,
    },
    "master_time_control_nml": {
        "experimentStartDate": Chunk_START_DATE.strftime(date_format),
        "experimentStopDate": Chunk_END_DATE.strftime(date_format),
    }
}
+
+
def read_namelist(namelist_string: str) -> "dict | None":
    """
    Parse custom namelist specifications provided in the configuration files.

    Accepts both YAML and Fortran-namelist (f90nml) syntax: the string is
    first interpreted as YAML; if that yields a plain string (i.e. the text
    was not YAML mapping syntax), it is re-parsed as a Fortran namelist.

    :param namelist_string: raw namelist text from the configuration file
    :return: a (possibly nested) dict of namelist groups/parameters, or
        None when the string is empty or contains no parameters
        (yaml.safe_load("") returns None -- callers check for this).
    """
    parameters = yaml.safe_load(namelist_string)
    if isinstance(parameters, str):
        # Not YAML after all -- treat it as Fortran-namelist syntax.
        parameters = f90nml.reads(nml_string=namelist_string).todict()
    return parameters
+
+
def patch_output_entries(namelist: f90nml.Namelist) -> f90nml.Namelist:
    """
    Replace the #OUTPUT_START# / #OUTPUT_END# placeholders in every
    output_nml entry with the chunk start/end dates (module-level globals),
    rendered with the configured date format. Entries are modified in place;
    the (same) namelist object is returned.
    """
    start_stamp = Chunk_START_DATE.strftime(date_format)
    end_stamp = Chunk_END_DATE.strftime(date_format)
    for entry in list(namelist["output_nml"]):
        for key in entry:
            if entry[key] == "#OUTPUT_START#":
                entry[key] = start_stamp
            elif entry[key] == "#OUTPUT_END#":
                entry[key] = end_stamp

    return namelist
+
+
def main():
    """
    Process both the atmosphere and the master namelist templates for the
    current chunk: apply the module-level replacement dicts plus any custom
    parameters from the configuration, patch the output date placeholders,
    and write the results into the run directory as icon_atmosphere.namelist
    and icon_master.namelist.
    :return:
    """
    # Process atmosphere namelist
    atmosphere_namelist = f90nml.read(ATMOSPHERE_NAMELIST_PATH.as_posix())
    # Convert output_nml to a co-group (there may be several output_nml groups).
    atmosphere_namelist.create_cogroup("output_nml")
    print("Original atmosphere namelist:")
    print(atmosphere_namelist)
    atmosphere_namelist.patch(atmosphere_namelist_replacements)

    # Read custom namelist parameters from configuration file
    atmosphere_custom_namelist = read_namelist(atmosphere_namelist_string)

    if atmosphere_custom_namelist is not None:
        try:
            atmosphere_namelist.patch(atmosphere_custom_namelist)
        except AttributeError:
            raise AssertionError("Problem applying the namelist patch! Probably related with the output section.")

    # Patch output entries:
    atmosphere_namelist = patch_output_entries(atmosphere_namelist)

    print("Patched atmosphere namelist:")
    print(atmosphere_namelist)

    atmosphere_output_namelist = (RUNDIR / "icon_atmosphere.namelist")
    f90nml.write(nml=atmosphere_namelist, nml_path=atmosphere_output_namelist.as_posix(), force=True)

    # Process master namelist
    master_namelist = f90nml.read(MASTER_NAMELIST_PATH.as_posix())
    print("Original master namelist:")
    print(master_namelist)
    # Read custom namelist parameters from configuration file
    master_custom_namelist = read_namelist(master_namelist_string)
    # Apply the replacements (restart flag and experiment start/stop dates).
    master_namelist.patch(master_namelist_replacements)
    if master_custom_namelist is not None:
        master_namelist.patch(master_custom_namelist)
    print("Patched master namelist:")
    print(master_namelist)
    master_output_namelist = (RUNDIR / "icon_master.namelist")
    f90nml.write(nml=master_namelist, nml_path=master_output_namelist.as_posix(), force=True)


if __name__ == '__main__':
    main()
+
-- 
GitLab