diff --git a/conf/jobs.yaml b/conf/jobs_real-from-dwd-ana.yaml
similarity index 71%
rename from conf/jobs.yaml
rename to conf/jobs_real-from-dwd-ana.yaml
index e9feea386e08f7d2e9a6c5191c5a7d927db76183..94c33cf5d02e01a0134ac05daa0102651ee030f9 100644
--- a/conf/jobs.yaml
+++ b/conf/jobs_real-from-dwd-ana.yaml
@@ -57,85 +57,75 @@ JOBS:
   # EXECUTABLE: /my_python_env/python3
 
   BUILD_ICON:
-    FILE: templates/build_icon.sh
-    WALLCLOCK: 01:00
+    FILE: templates/common/build_icon.sh
+    WALLCLOCK: 04:00
     PROCESSORS: 16
 
-  BUILD_ENSTOOLS:
-    FILE: templates/build_enstools.sh
+  BUILD_PYTHON_ENVIRONMENT:
+    FILE: templates/common/build_python_environment.sh
+    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
     DEPENDENCIES: BUILD_ICON
     WALLCLOCK: 01:00
     PROCESSORS: 16
 
-  PREPARE_RUNDIR:
-    FILE: templates/prepare_rundir.sh
+  TRANSFER_NAMELISTS:
+    FILE: templates/common/transfer_namelists.sh
+    PLATFORM: LOCAL
+
+  PREPARE_EXPERIMENT:
+    FILE: templates/real-from-dwd-ana/prepare_experiment.sh
     DEPENDENCIES: BUILD_ICON
+    RUNNING: once
+    WALLCLOCK: 01:00
+
+  PREPARE_DATE:
+    FILE: templates/real-from-dwd-ana/prepare_date.sh
+    RUNNING: date
+    WALLCLOCK: 01:00
+
+  PREPARE_MEMBER:
+    FILE: templates/real-from-dwd-ana/prepare_member.sh
+    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE
     RUNNING: member
     WALLCLOCK: 01:00
 
   PREPARE_CHUNK:
-    FILE: templates/prepare_chunk.sh
-    DEPENDENCIES: PREPARE_RUNDIR RUN_ICON-1
+    FILE: templates/real-from-dwd-ana/prepare_chunk.py
+    DEPENDENCIES: TRANSFER_NAMELISTS BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
     WALLCLOCK: 00:05
     RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
 
   RUN_ICON:
-    FILE: templates/run_icon.sh
+    FILE: templates/common/run_icon.sh
     DEPENDENCIES: PREPARE_CHUNK
-    WALLCLOCK: 01:00
+    WALLCLOCK: 04:00
     RUNNING: chunk
-    PROCESSORS: 16
-    CUSTOM_DIRECTIVES: [ "export OMPI_MCA_btl_tcp_if_include=10.0.0.0/8" ]
+    PROCESSORS: 64
+    MEMORY: 81920
+    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive"]
 
   COMPRESS:
-    FILE: templates/compress.py
-    DEPENDENCIES: RUN_ICON BUILD_ENSTOOLS COMPRESS-1
+    FILE: templates/common/compress.py
+    DEPENDENCIES: RUN_ICON BUILD_PYTHON_ENVIRONMENT COMPRESS-1
     RUNNING: chunk
     TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/venv/bin/python3"
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PROCESSORS: 16
+    MEMORY: 16384
 
   TRANSFER:
-    FILE: templates/transfer.sh
+    FILE: templates/common/transfer.sh
     DEPENDENCIES: COMPRESS
-    WALLCLOCK: 00:10
+    # Since this job runs locally, we can simply use a long wallclock.
+    WALLCLOCK: "24:00"
     RUNNING: member
     PLATFORM: LOCAL
 
   CLEAN:
-    FILE: templates/clean.sh
+    FILE: templates/common/clean.sh
     DEPENDENCIES: TRANSFER
     WALLCLOCK: 00:10
-    RUNNING: member
-
-
-#    INI:
-#        FILE: INI.sh
-#        DEPENDENCIES: REMOTE_SETUP
-#        RUNNING: member
-#        WALLCLOCK: 00:05
-#
-#    SIM:
-#        FILE: SIM.sh
-#        DEPENDENCIES: INI SIM-1 CLEAN-2
-#        RUNNING: chunk
-#        WALLCLOCK: 00:05
-#        PROCESSORS: 2
-#        THREADS: 1
-#
-#    POST:
-#        FILE: POST.sh
-#        DEPENDENCIES: SIM
-#        RUNNING: chunk
-#        WALLCLOCK: 00:05
-#
-#    CLEAN:
-#        FILE: CLEAN.sh
-#        DEPENDENCIES: POST
-#        RUNNING: chunk
-#        WALLCLOCK: 00:05
-#
-#    TRANSFER:
-#        FILE: TRANSFER.sh
-#        PLATFORM: LOCAL
-#        DEPENDENCIES: CLEAN
-#        RUNNING: member
\ No newline at end of file
+    RUNNING: member
\ No newline at end of file
diff --git a/conf/proj.yaml b/conf/proj.yaml
deleted file mode 100644
index df6da849402580b68d50a6e04551dc8352f51fd7..0000000000000000000000000000000000000000
--- a/conf/proj.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-spack:
-  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
-  branch: lmu/ubuntu20.04-icon
-  compiler: gcc@11.3.0
-
-icon:
-  version: 2.6.4-nwp3
-
-simulation:
-  BGRID_GLOBAL: icon_grid_0012_R02B04_G.nc
-  RGRID_GLOBAL: icon_grid_0011_R02B03_R.nc
-  BGRID_NEST: icon_grid_nest_G_DOM01.nc
-  RGRID_NEST: icon_grid_nest_R_DOM01.nc
-  BGRID_NEST_LATBC: icon_grid_nest_G_DOM01_latbc.nc
-  OUTPUT_FILES: "init-test_DOM01_ML_*.nc init-test-ext_DOM01_ML_*.nc"
-  FILES_TO_CLEAN: "*.nc"
-
-
-data_management:
-  local_folder: /scratch/o/Oriol.Tinto/tmp/
\ No newline at end of file
diff --git a/conf/proj_real-from-dwd-ana.yaml b/conf/proj_real-from-dwd-ana.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..027bc9f5866e3e109fb08a063e4ff242f809274d
--- /dev/null
+++ b/conf/proj_real-from-dwd-ana.yaml
@@ -0,0 +1,42 @@
+spack:
+  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
+  branch: lmu/ubuntu20.04-icon
+  compiler: gcc@11.3.0
+
+icon:
+  version: 2.6.5-nwp0
+
+python_environment:
+  # Name of the virtual environment in the remote platform experiment folder
+  folder_name: python_environment
+  requirements:
+    # Because there's an issue with numba, for now we need to keep a specific version of numpy
+    - numpy==1.23
+    - enstools-compression
+    # Just to try a library from a git repository.
+    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
+    - f90nml
+
+simulation:
+  dynamics_grid_filename: icon_grid_0016_R02B06_G.nc
+  radiation_grid_filename: icon_grid_0015_R02B05_R.nc
+  external_parameters_filename: icon_extpar_0016_R02B06_G_20131206.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/namelists/icon_master_real-from-dwd-ana.namelist"
+    atmosphere: "%HPCROOTDIR%/namelists/icon_atmosphere_real-from-dwd-ana.namelist"
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
+  files_to_clean: "*.nc"
+
+data_management:
+  # Where do we put the output files afterwards?
+  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
+
+initial_conditions:
+  # Where are we getting our initial data from?
+  parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
+  member: 1
+
diff --git a/namelists/icon_atmosphere_real-from-dwd-ana.namelist b/namelists/icon_atmosphere_real-from-dwd-ana.namelist
new file mode 100644
index 0000000000000000000000000000000000000000..d05040fa82ff6cc2ddc4c911aaa9130b9195830b
--- /dev/null
+++ b/namelists/icon_atmosphere_real-from-dwd-ana.namelist
@@ -0,0 +1,89 @@
+&run_nml
+    ltestcase                   = .false.
+    dtime                       =  180
+    output                      = 'nml'
+    msg_level                   = 15
+    num_lev                     = 90
+    lvert_nest                  = .false.
+    ldynamics                   = .true.
+    ltransport                  = .true.
+    ntracer                     = 5
+    iforcing                    = 3
+/
+
+&time_nml
+    dt_restart = '%checkpoint_time%'
+/
+
+&io_nml
+    dt_checkpoint = '%checkpoint_time%'
+/
+
+&nwp_phy_nml
+    lupatmo_phy = .FALSE.
+/
+
+&grid_nml
+    dynamics_parent_grid_id     = 0
+    dynamics_grid_filename      = '%dynamics_grid_filename%'
+    radiation_grid_filename     = '%radiation_grid_filename%'
+    lredgrid_phys               = .true.
+/
+
+&extpar_nml
+    itopo                       = 1
+    extpar_filename             = '%external_parameters_filename%'
+/
+
+&initicon_nml
+    init_mode                   = 1,
+    dwdfg_filename              = '%first_guess_filename%'
+    dwdana_filename             = '%analysis_filename%'
+    lconsistency_checks         = .false.
+    ana_varnames_map_file       = 'ana_varnames_map_file.txt'
+/
+
+! settings from operational setup for vertical coordinate
+&sleve_nml
+    min_lay_thckn               = 20.
+    max_lay_thckn               = 400.
+    htop_thcknlimit             = 14000.
+    top_height                  = 75000.
+    stretch_fac                 = 0.9
+    decay_scale_1               = 4000.
+    decay_scale_2               = 2500.
+    decay_exp                   = 1.2
+    flat_height                 = 16000.
+/
+
+&nonhydrostatic_nml
+    damp_height                 = 22000.
+    rayleigh_coeff              = 1
+/
+
+
+&parallel_nml
+    nproma                      = 16
+/
+
+! LATBC files, these files will be used as input for the next example.
+&output_nml
+    file_interval               = 'PT3600S'
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_END_DATE%'
+    output_filename             = "latbc"
+    output_interval             = 'PT3600S'
+    include_last                = .true.
+    ml_varlist                  = 'u', 'v', 'w', 'theta_v', 'rho', 'qv', 'qc', 'qi', 'qr', 'qs', 'z_ifc'
+/
+
+! First Guess file
+&output_nml
+    file_interval               = 'PT3600S'
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_END_DATE%'
+    output_filename             = "init"
+    output_interval             = 'PT3600S'
+    include_last                = .true.
+    ml_varlist                  = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
+/
diff --git a/namelists/icon_master_real-from-dwd-ana.namelist b/namelists/icon_master_real-from-dwd-ana.namelist
new file mode 100644
index 0000000000000000000000000000000000000000..fe401c4424536da7f88b7dd09bfc28b6d531b197
--- /dev/null
+++ b/namelists/icon_master_real-from-dwd-ana.namelist
@@ -0,0 +1,16 @@
+&master_nml
+    lrestart                    = "%is_restart%"
+    lrestart_write_last         = .true.
+/
+
+&master_model_nml
+    model_type =                1                       ! atmospheric model
+    model_name =                "ATMO"                  ! name of this model component
+    model_namelist_filename =   "icon_atmosphere.namelist"
+/
+
+&master_time_control_nml
+    calendar                    = "proleptic gregorian"
+    experimentStartDate         = '%Chunk_START_DATE%'
+    experimentStopDate          = '%Chunk_END_DATE%'
+/
\ No newline at end of file
diff --git a/templates/build_icon.sh b/templates/build_icon.sh
deleted file mode 100644
index b5ef0337e13420977b9cc3bf2db2d28d94119a07..0000000000000000000000000000000000000000
--- a/templates/build_icon.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-# Get some variables provided by autosubmit.
-# TODO: What do we do to ensure that these variables are defined in the proj file?
-WORKDIR=%HPCROOTDIR%
-ICON_VERSION=%ICON_VERSION%
-SPACK_URL=%spack.url%
-SPACK_BRANCH=%spack.branch%
-SPACK_COMPILER=%spack.compiler%
-
-
-# If the workdir directory does not exist create it
-if [ ! -d ${WORKDIR} ] ; then
-        mkdir -p ${WORKDIR}
-fi
-
-# Go to the working directory
-cd ${WORKDIR}
-
-# Check if experiment's spack installation already exists, if it doesn't, clone it.
-SPACK_ENV=spack/share/spack/setup-env.sh
-if [ ! -f ${SPACK_ENV} ] ; then
-  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
-  #TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
-fi
-
-# Setup the environment
-source ${SPACK_ENV}
-
-
-if [ $( spack find icon-nwp@${ICON_VERSION} &> /dev/null ; echo $? ) -ne 0 ]; then
-        echo "Installing icon-nwp@${ICON_VERSION}."
-        # Compile openmpi with schedulers=slurm
-        spack install openmpi%${SPACK_COMPILER}+legacylaunchers schedulers=slurm
-        spack install icon-nwp@${ICON_VERSION}%${SPACK_COMPILER} ^openmpi%${SPACK_COMPILER}
-else
-        echo "icon-nwp@${ICON_VERSION} already installed!"
-fi
diff --git a/templates/common/build_icon.sh b/templates/common/build_icon.sh
new file mode 100644
index 0000000000000000000000000000000000000000..e1db87552109fb6b78a7b6cb1a300c1a33ab23bd
--- /dev/null
+++ b/templates/common/build_icon.sh
@@ -0,0 +1,51 @@
+# Get some variables provided by autosubmit.
+# TODO: What do we do to ensure that these variables are defined in the proj file?
+WORKDIR=%HPCROOTDIR%
+ICON_VERSION=%ICON_VERSION%
+SPACK_URL=%spack.url%
+SPACK_BRANCH=%spack.branch%
+SPACK_COMPILER=%spack.compiler%
+
+# If the workdir directory does not exist create it
+if [ ! -d ${WORKDIR} ]; then
+  mkdir -p ${WORKDIR}
+fi
+
+# Go to the working directory
+cd ${WORKDIR}
+
+# Check if experiment's spack installation already exists, if it doesn't, clone it.
+SPACK_ENV=spack/share/spack/setup-env.sh
+if [ ! -f ${SPACK_ENV} ]; then
+  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
+  #TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
+fi
+
+# Setup the environment
+source ${SPACK_ENV}
+
+if [ $(
+  spack find icon-nwp@${ICON_VERSION} &>/dev/null
+  echo $?
+) -ne 0 ]; then
+  echo "Installing icon-nwp@${ICON_VERSION}."
+  # Compile openmpi with schedulers=slurm
+  spack install openmpi%${SPACK_COMPILER}+legacylaunchers schedulers=slurm
+  spack install icon-nwp@${ICON_VERSION}%${SPACK_COMPILER} ^openmpi%${SPACK_COMPILER}
+else
+  echo "icon-nwp@${ICON_VERSION} already installed!"
+fi
+
+# Need to get ECCODES DWD definitions:
+eccodes_version=$(spack find eccodes | grep eccodes@ | cut -d "@" -f 2)
+
+definitions_tar_file=eccodes_definitions.edzw-${eccodes_version}-1.tar.bz2
+if [ ! -f ${definitions_tar_file} ]; then
+  defs_url=https://opendata.dwd.de/weather/lib/grib/${definitions_tar_file}
+  wget ${defs_url}
+
+  # Decompress definitions file
+  tar -xf ${definitions_tar_file}
+  # Create a file containing the environment variable that needs to be set in order to use DWD's definitions:
+  echo "export ECCODES_DEFINITION_PATH=${WORKDIR}/definitions.edzw-${eccodes_version}-1" >eccodes_defs.env
+fi
diff --git a/templates/build_enstools.sh b/templates/common/build_python_environment.sh
similarity index 58%
rename from templates/build_enstools.sh
rename to templates/common/build_python_environment.sh
index b7a86d18eea906769ec281fac8da9fec43e3298e..6388ef7bddafa8d48c1cf6a3198f0c130c3d8393 100644
--- a/templates/build_enstools.sh
+++ b/templates/common/build_python_environment.sh
@@ -17,39 +17,48 @@ cd ${WORKDIR}
 # Check if experiment's spack installation already exists, if it doesn't, clone it.
 SPACK_ENV=spack/share/spack/setup-env.sh
 if [ ! -f ${SPACK_ENV} ]; then
-  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
-  #TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
+  echo "Spack folder not found!"
+  exit 1
 fi
 
-# Setup the environment
+# Setup the spack environment
 source ${SPACK_ENV}
 
 # Use spack to get a recent enough version of python3
-if [ $(
-  spack find python@3.8: &>/dev/null
+if [ $( spack find python@3.8: &>/dev/null
   echo $?
 ) -ne 0 ]; then
   echo "Installing a version of python3"
   # Compile openmpi with schedulers=slurm
   spack install python@3.8:
 else
-  echo "py-enstools-compression already installed!"
+  echo "python@3.8: already installed!"
 fi
 
-# Link the python binary into the main folder so can be easily used later:
-
+# Load the python module
 spack load python@3.8:
 
-# Create virtual environment
-python3 -m venv --prompt AS venv
+PYTHON_ENVIRONMENT_FOLDER=%python_environment.folder_name%
+
+if [ ! -d ${PYTHON_ENVIRONMENT_FOLDER} ]; then
+  # Create virtual environment
+  python3 -m venv --prompt AS ${PYTHON_ENVIRONMENT_FOLDER}
+fi
 
 # Load environment
-source venv/bin/activate
+source ${PYTHON_ENVIRONMENT_FOLDER}/bin/activate
 
 # Create a link to the binary
 ln -sf $(which python3) ${WORKDIR}/python3
 
-# Install enstools-compression via pip
+# Install the requirements via pip
 # TODO: Due to a incompatibility issue between latest numba and numpy I have to add this here. Hopefully removable soon.
-pip install numpy==1.23
-pip install enstools-compression
+requirements="%python_environment.requirements%"
+
+# Convert list with python format to a bash array
+requirements=($( echo ${requirements} | sed "s/'//g" | tr -d '[],'))
+
+# Install requirements.
+for requirement in ${requirements[@]} ; do
+  python -m pip install ${requirement}
+done
diff --git a/templates/clean.sh b/templates/common/clean.sh
similarity index 72%
rename from templates/clean.sh
rename to templates/common/clean.sh
index 578dfee16b7d0bd2f45a2733efeae301d28a6e4f..6b19b8dca823a5a26a52d4781955dcd9c27508d8 100644
--- a/templates/clean.sh
+++ b/templates/common/clean.sh
@@ -13,4 +13,9 @@ RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
 # Copy the output files
 for file_name in ${FILES_TO_CLEAN}; do
   rm ${RUNDIR}/${file_name}
-done
\ No newline at end of file
+done
+
+# Remove the output files from the remote machine
+OUTPUT_DIR=${WORKDIR}/output/${STARTDATE}/${MEMBER}
+
+rm ${OUTPUT_DIR}/*
\ No newline at end of file
diff --git a/templates/compress.py b/templates/common/compress.py
similarity index 75%
rename from templates/compress.py
rename to templates/common/compress.py
index ce15b39ed413aab41682655c9f2959a11a790f60..af0c18dc511704deee05a7f8a905d10dbc19ad3a 100644
--- a/templates/compress.py
+++ b/templates/common/compress.py
@@ -1,12 +1,12 @@
-#!/bin/env python3
 import glob
 import logging
 
 # Set logging level to info.
-logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger("compress")
+logger.setLevel(logging.INFO)
 
 
-def main():
+def compress_outputs():
     import os
     from pathlib import Path
     import enstools.compression.api
@@ -31,6 +31,9 @@ def main():
 
     output_files = [Path(f) for f in output_files]
 
+    if not output_files:
+        logger.warning("The list of files is empty!")
+
     # Define output dir and create it in case it doesn't exist
     OUTPUT_DIR = WORKDIR / "output" / STARTDATE / MEMBER
 
@@ -41,15 +44,15 @@ def main():
     for source_file_path in output_files:
         destination_file_path = OUTPUT_DIR / source_file_path.name
 
-        logging.info(f"Copying {source_file_path.name!r}")
+        logger.info(f"Copying {source_file_path.name!r}")
         enstools.compression.api.compress(source_file_path, output=destination_file_path, compression="lossless")
 
         # Remove source files
         if destination_file_path.exists():
-            logging.info(
-                f"{source_file_path.name!r} copied to {destination_file_path.as_posix()!r}. Removing {source_file_path.as_posix()!r}")
+            logger.info(f"{source_file_path.name!r} copied to {destination_file_path.as_posix()!r}.")
+            logger.info(f"Removing {source_file_path.as_posix()!r}")
             source_file_path.unlink()
 
 
 if __name__ == "__main__":
-    main()
+    compress_outputs()
diff --git a/templates/common/prepare_chunk.sh b/templates/common/prepare_chunk.sh
new file mode 100644
index 0000000000000000000000000000000000000000..f99dfcd65100425227cbae0123dd3ea7c75d6ce3
--- /dev/null
+++ b/templates/common/prepare_chunk.sh
@@ -0,0 +1,58 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+
+dynamics_grid_filename=%simulation.dynamics_grid_filename%
+radiation_grid_filename=%simulation.radiation_grid_filename%
+external_parameters_filename=%simulation.external_parameters_filename%
+
+
+STARTDATE=%SDATE%
+
+# Example of date format "2018-06-01T00:00:00Z"
+START_YEAR=%Chunk_START_YEAR%
+START_MONTH=%Chunk_START_MONTH%
+START_DAY=%Chunk_START_DAY%
+START_HOUR=%Chunk_START_HOUR%
+
+END_YEAR=%Chunk_END_YEAR%
+END_MONTH=%Chunk_END_MONTH%
+END_DAY=%Chunk_END_DAY%
+END_HOUR=%Chunk_END_HOUR%
+
+Chunk_START_DATE="${START_YEAR}-${START_MONTH}-${START_DAY}T${START_HOUR}:00:00Z"
+Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:00:00Z"
+
+# Convert dates to Unix timestamps
+t1_unix=$(date -d "$Chunk_START_DATE" +%s)
+t2_unix=$(date -d "$Chunk_END_DATE" +%s)
+
+# Compute difference in seconds
+checkpoint_time=$(((t2_unix - t1_unix)))
+
+# Compute number of steps
+dtime=180
+
+# Extend chunk 10 minutes to ensure checkpoint creation at the proper time
+# TODO: Works but it is a bit ugly.
+Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:10:00Z"
+
+MEMBER=%MEMBER%
+CHUNK=%CHUNK%
+
+# If the chunk is not the first one, start from a restart file.
+if [[ "${CHUNK}" -eq "1" ]]; then
+  is_restart=.false.
+else
+  is_restart=.true.
+fi
+
+# Define rundir
+RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+cd ${RUNDIR} || exit
+
+# Get AN and FG file names from the files saved by prepare_date (prepare_rundir no longer exists)
+AN_FILE=$( cat an_file.txt )
+FG_FILE=$( cat fg_file.txt )
diff --git a/templates/run_icon.sh b/templates/common/run_icon.sh
similarity index 81%
rename from templates/run_icon.sh
rename to templates/common/run_icon.sh
index f67ee87f32d3cf931e4bcbbb87236883ba2c1f2c..1a5fec30d901590072f9aa3135292dba299a6bfd 100644
--- a/templates/run_icon.sh
+++ b/templates/common/run_icon.sh
@@ -17,5 +17,8 @@ source ${SPACK_ENV}
 # Load icon module
 spack load icon-nwp@%ICON_VERSION%
 
+# Set environment variable for eccodes-dwd definitions:
+source ${WORKDIR}/eccodes_defs.env
+
 # Run icon
 srun icon
diff --git a/templates/transfer.sh b/templates/common/transfer.sh
similarity index 87%
rename from templates/transfer.sh
rename to templates/common/transfer.sh
index d35ff47fa35056335df5730fb1c9c41e5365909a..795feea3e7a497d1b9f495cc05e93cc6c7d5f6ac 100644
--- a/templates/transfer.sh
+++ b/templates/common/transfer.sh
@@ -11,7 +11,7 @@ HPCHOST=%HPCHOST%
 # Define output dir in remote machine
 OUTPUT_DIR=${WORKDIR}/output/${STARTDATE}/${MEMBER}
 
-MAIN_LOCAL_FOLDER=%data_management.local_folder%/%DEFAULT.EXPID%
+MAIN_LOCAL_FOLDER=%data_management.local_destination_folder%/%DEFAULT.EXPID%
 
 DESTINATION_DIR=${MAIN_LOCAL_FOLDER}/${STARTDATE}/${MEMBER}
 
diff --git a/templates/common/transfer_namelists.sh b/templates/common/transfer_namelists.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0dbd9038b606d66f1d6d4f6afe06a3a7c515307f
--- /dev/null
+++ b/templates/common/transfer_namelists.sh
@@ -0,0 +1,13 @@
+# Synchronize the local namelists with the remote directory
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+HPCUSER=%HPCUSER%
+HPCHOST=%HPCHOST%
+
+# Define local and remote namelists folders
+REMOTE_WORKDIR=${WORKDIR}/
+LOCAL_NAMELISTS_FOLDER="%PROJDIR%/namelists"
+
+# Transfer the namelists
+rsync -v -u -r --no-relative ${LOCAL_NAMELISTS_FOLDER} ${HPCUSER}@${HPCHOST}:${REMOTE_WORKDIR}
diff --git a/templates/prepare_chunk.sh b/templates/prepare_chunk.sh
deleted file mode 100644
index be8d2b3a516cc74ec1cf5912114c15f5e332e0b2..0000000000000000000000000000000000000000
--- a/templates/prepare_chunk.sh
+++ /dev/null
@@ -1,147 +0,0 @@
-#!/bin/bash -l
-
-# Get some variables provided by autosubmit.
-WORKDIR=%HPCROOTDIR%
-ICON_VERSION=%ICON_VERSION%
-
-BGRID_GLOBAL=%simulation.BGRID_GLOBAL%
-RGRID_GLOBAL=%simulation.RGRID_GLOBAL%
-
-STARTDATE=%SDATE%
-
-# Example of date format "2018-06-01T00:00:00Z"
-START_YEAR=%Chunk_START_YEAR%
-START_MONTH=%Chunk_START_MONTH%
-START_DAY=%Chunk_START_DAY%
-START_HOUR=%Chunk_START_HOUR%
-
-END_YEAR=%Chunk_END_YEAR%
-END_MONTH=%Chunk_END_MONTH%
-END_DAY=%Chunk_END_DAY%
-END_HOUR=%Chunk_END_HOUR%
-
-Chunk_START_DATE="${START_YEAR}-${START_MONTH}-${START_DAY}T${START_HOUR}:00:00Z"
-Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:00:00Z"
-
-# Convert dates to Unix timestamps
-t1_unix=$(date -d "$Chunk_START_DATE" +%s)
-t2_unix=$(date -d "$Chunk_END_DATE" +%s)
-
-# Compute difference in seconds
-checkpoint_time=$(((t2_unix - t1_unix)))
-
-# Compute number of steps
-dtime=300
-nsteps=$((checkpoint_time / ${dtime}))
-
-# Extend chunk 10 minutes to ensure checkpoint creation at the proper time
-# TODO: Works but it is a bit ugly.
-Chunk_END_DATE="${END_YEAR}-${END_MONTH}-${END_DAY}T${END_HOUR}:10:00Z"
-
-MEMBER=%MEMBER%
-CHUNK=%CHUNK%
-
-# If the chunk is not the first one, start from a restart file.
-if [[ "${CHUNK}" -eq "1" ]]; then
-  is_restart=.false.
-else
-  is_restart=.true.
-fi
-
-# Define rundir
-RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
-
-cd ${RUNDIR}
-
-cat >icon_master.namelist <<EOF
-&master_nml
-    lrestart                    = ${is_restart}
-    lrestart_write_last         = .true.
-/
-
-&master_model_nml
-    model_type =                1                       ! atmospheric model
-    model_name =                "ATMO"                  ! name of this model component
-    model_namelist_filename =   "icon_atmosphere.namelist"
-/
-
-&master_time_control_nml
-    calendar                    = "proleptic gregorian"
-    experimentStartDate         = '${Chunk_START_DATE}'
-    experimentStopDate          = '${Chunk_END_DATE}'
-/
-EOF
-
-cat >icon_atmosphere.namelist <<EOF
-&run_nml
-    ltestcase                   = .TRUE.        ! idealized testcase runs
-    dtime                       = 300           ! time step of 300 seconds
-    output                      = 'nml'         ! use output nameslists
-    msg_level                   = 15
-    nsteps                      = ${nsteps}
-    num_lev                     = 31
-    lvert_nest                  = .false.
-    ldynamics                   = .true.
-    ltransport                  = .true.
-    ntracer                     = 5
-    iforcing                    = 3
-/
-
-&time_nml
-dt_restart = ${checkpoint_time}
-/
-
-&io_nml
-  dt_checkpoint            =  ${checkpoint_time}
-/
-
-&grid_nml
-    dynamics_grid_filename      = '$BGRID_GLOBAL'
-    radiation_grid_filename     = '$RGRID_GLOBAL'
-    dynamics_parent_grid_id     = 0
-    lredgrid_phys               = .true.
-/
-
-&nh_testcase_nml
-    nh_test_name                = 'APE_nwp'   ! testcase selection
-    ape_sst_case                = 'sst_qobs'
-/
-
-&nonhydrostatic_nml
-    damp_height                 = 18000
-    rayleigh_coeff              = 0.75
-/
-
-&nwp_phy_nml
-    inwp_surface                = 0
-/
-
-&parallel_nml
-    nproma                      = 16
-/
-
-! the following two output files are used to initialize the next run
-&output_nml
-    file_interval               = 'PT3600S'
-    output_start                = '${Chunk_START_DATE}'
-    output_end                  = '${Chunk_END_DATE}'
-    output_filename             = "init-test"
-    output_interval             = 'PT3600S'
-    include_last                = .true.
-    mode                        = 1
-    taxis_tunit                 = 1
-    ml_varlist                  = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
-    remap                       = 1
-    reg_lon_def                 = -30.,0.5,30.
-    reg_lat_def                 = 90.,-0.5, -90.
-/
-&output_nml
-    steps_per_file              = 1
-    output_start                = '${Chunk_START_DATE}'
-    output_end                  = '${Chunk_START_DATE}'
-    output_filename             = "init-test-ext"
-    include_last                = .true.
-    output_interval             = 'PT3600S'
-    ml_varlist                  = 'depth_lk', 'emis_rad', 'fr_lake', 'fr_land', 'topography_c', 'soiltyp', 'sso_stdh', 'sso_theta', 'sso_gamma', 'sso_sigma'
-/
-EOF
diff --git a/templates/prepare_rundir.sh b/templates/prepare_rundir.sh
deleted file mode 100644
index 99fef65c204447ee617d2750018a95b0844099b5..0000000000000000000000000000000000000000
--- a/templates/prepare_rundir.sh
+++ /dev/null
@@ -1,46 +0,0 @@
-#!/bin/bash -l
-
-# Get some variables provided by autosubmit.
-WORKDIR=%HPCROOTDIR%
-ICON_VERSION=%ICON_VERSION%
-
-BGRID_GLOBAL=%simulation.BGRID_GLOBAL%
-RGRID_GLOBAL=%simulation.RGRID_GLOBAL%
-
-STARTDATE=%SDATE%
-
-MEMBER=%MEMBER%
-
-# Define rundir
-RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
-
-# Activate spack
-SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
-source ${SPACK_ENV}
-
-# Load icon module
-spack load icon-nwp@%ICON_VERSION%
-
-# Create member folder and go there
-mkdir -p ${RUNDIR}
-
-cd ${RUNDIR}
-
-# Download or copy required input files
-function download_file() {
-  URL=$1
-  FILE=${2:-$(basename $URL)}
-  if [ ! -e $FILE ]; then
-    echo "Download $URL => $FILE"
-    wget -q $URL -O $FILE
-  fi
-}
-
-# Download or copy required input files
-
-download_file http://icon-downloads.mpimet.mpg.de/grids/public/edzw/$BGRID_GLOBAL
-download_file http://icon-downloads.mpimet.mpg.de/grids/public/edzw/$RGRID_GLOBAL
-
-# input for radiation
-ln -sf ${ICON_DATA_PATH}/rrtmg_lw.nc .
-ln -sf ${ICON_DATA_PATH}/ECHAM6_CldOptProps.nc .
diff --git a/templates/real-from-dwd-ana/prepare_chunk.py b/templates/real-from-dwd-ana/prepare_chunk.py
new file mode 100644
index 0000000000000000000000000000000000000000..6656e4ef13fa0deb15a29eb1d74078a5e721f99c
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_chunk.py
@@ -0,0 +1,95 @@
+import logging
+import re
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import f90nml
+
+logger = logging.getLogger("prepare_chunk")
+logger.setLevel(logging.INFO)
+
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+MEMBER = "%MEMBER%"
+CHUNK = "%CHUNK%"
+
+# Example of date format "2018-06-01T00:00:00Z"
+date_format = "%simulation.date_format%"
+
+START_YEAR = "%Chunk_START_YEAR%"
+START_MONTH = "%Chunk_START_MONTH%"
+START_DAY = "%Chunk_START_DAY%"
+START_HOUR = "%Chunk_START_HOUR%"
+
+END_YEAR = "%Chunk_END_YEAR%"
+END_MONTH = "%Chunk_END_MONTH%"
+END_DAY = "%Chunk_END_DAY%"
+END_HOUR = "%Chunk_END_HOUR%"
+
+Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
+Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))
+
+# Compute difference in seconds
+checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())
+
+# TODO: Is that really necessary?
+# Add 10 minutes to allow the model to write the restarts
+Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)
+# Get run directory
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/{MEMBER}")
+
+# TODO: This is a bit ugly
+# Read first-guess and analysis filenames from files:
+first_guess_filename = (RUNDIR / "fg_file.txt").read_text().strip()
+analysis_filename = (RUNDIR / "an_file.txt").read_text().strip()
+
+# Get some variable replacements from the proj.yml file through autosubmit
+variable_replacements = {
+    "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
+    "radiation_grid_filename": "%simulation.radiation_grid_filename%",
+    "external_parameters_filename": "%simulation.external_parameters_filename%",
+    "first_guess_filename": first_guess_filename,
+    "analysis_filename": analysis_filename,
+    "Chunk_START_DATE": Chunk_START_DATE.strftime(date_format),
+    "Chunk_END_DATE": Chunk_END_DATE.strftime(date_format),
+    "is_restart": False if "%CHUNK%" == "1" else True,
+    "checkpoint_time": checkpoint_time,
+}
+
+
+def adapt_namelist(input_namelist: str, output_namelist: str):
+    input_namelist = Path(input_namelist)
+    output_namelist = Path(output_namelist)
+
+    namelist = f90nml.read(input_namelist.as_posix())
+    group_keys = [gk for gk in namelist]
+
+    for group in group_keys:
+        variable_keys = [vk for vk in namelist[group]]
+        for variable in variable_keys:
+            value = namelist[group][variable]
+            m = re.match(r"%(.*)%", str(value))
+            if m:
+                key = m.group(1)
+
+                if key not in variable_replacements:
+                    raise AssertionError(f"The namelist {input_namelist.as_posix()!r} contains the variable {key!r} "
+                                         f"which is not in the list of provided replacements:\n"
+                                         f"{[v for v in variable_replacements]}")
+                logger.info(f"Replacing {group}>{variable}:{key} with {variable_replacements[key]!r}")
+                namelist[group][variable] = variable_replacements[key]
+
+    f90nml.write(nml=namelist, nml_path=output_namelist.as_posix(), force=True)
+
+
+if __name__ == '__main__':
+    atmosphere_namelist_path = "%simulation.namelist_paths.atmosphere%"
+    master_namelist_path = "%simulation.namelist_paths.master%"
+
+    # Adapt atmosphere namelist
+    adapt_namelist(input_namelist=atmosphere_namelist_path,
+                   output_namelist=(RUNDIR / "icon_atmosphere.namelist").as_posix())
+    # Adapt master namelist
+    adapt_namelist(input_namelist=master_namelist_path,
+                   output_namelist=(RUNDIR / "icon_master.namelist").as_posix())
diff --git a/templates/real-from-dwd-ana/prepare_date.sh b/templates/real-from-dwd-ana/prepare_date.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4152601c3603607a80529ea090ab4529d7e4b1aa
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_date.sh
@@ -0,0 +1,44 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+
+# Define date directory, create it and go there
+COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+# Create member folder and go there
+mkdir -p ${COMMON_DATE_FOLDER}
+cd ${COMMON_DATE_FOLDER} || exit
+
+# some settings
+AN_MEMBER=$(printf "%03d" %initial_conditions.member%)
+INITIAL_CONDITIONS_PARENT_FOLDER=%initial_conditions.parent_folder%
+
+INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+
+AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
+FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
+
+if [ ! -f "${AN_SOURCE}" ]; then
+  echo "Analysis file for date ${STARTDATE} not found!"
+  exit 1
+fi
+
+if [ ! -f "${FG_SOURCE}" ]; then
+  echo "FG file for date ${STARTDATE} not found!"
+  exit 1
+fi
+
+AN_FILE=$(basename "${AN_SOURCE}")
+FG_FILE=$(basename "${FG_SOURCE}")
+
+# Save filenames to be used later by other scripts.
+echo "${AN_FILE}" > an_file.txt
+echo "${FG_FILE}" > fg_file.txt
+
+# Copy the first-guess and analysis files.
+cp "${FG_SOURCE}" "${FG_FILE}"
+cp "${AN_SOURCE}" "${AN_FILE}"
+
+# Change permissions to read only.
+chmod 440 ./*
diff --git a/templates/real-from-dwd-ana/prepare_experiment.sh b/templates/real-from-dwd-ana/prepare_experiment.sh
new file mode 100644
index 0000000000000000000000000000000000000000..5d9b03dcf54d7cb2c3e32780033d78d6ca64d265
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_experiment.sh
@@ -0,0 +1,45 @@
+#!/bin/bash -l
+# One-off experiment setup: download grid / external-parameter files and
+# link static ICON input into a folder shared by all dates and members.
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
+RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
+EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
+
+
+# Activate spack
+SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
+source "${SPACK_ENV}"
+
+# Load icon module needed to retrieve some data
+spack load icon-nwp@%ICON_VERSION%
+
+# Create a folder for the common inidata and go there
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+mkdir -p "${COMMON_INIDATA_FOLDER}"
+# exit 1 (not bare exit, which would propagate status 0) if cd fails.
+cd "${COMMON_INIDATA_FOLDER}" || exit 1
+
+# Download a file only if it is not already present.
+function download_file() {
+  URL=$1
+  FILE=${2:-$(basename "$URL")}
+  if [ ! -e "$FILE" ]; then
+    echo "Download $URL => $FILE"
+    # On failure remove the partial file and abort: wget -O creates the
+    # target even when the download fails, which would otherwise make the
+    # existence check above skip the download on the next retry.
+    if ! wget -q "$URL" -O "$FILE"; then
+      rm -f "$FILE"
+      echo "Failed to download $URL" >&2
+      exit 1
+    fi
+  fi
+}
+
+# Download grid files and external parameters
+BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
+download_file "${BASEURL}/${DYNAMICS_GRID_FILENAME}"
+download_file "${BASEURL}/${RADIATION_GRID_FILE}"
+download_file "${BASEURL}/${EXTERNAL_PARAMETERS_FILE}"
+
+# Link input for radiation.
+# NOTE(review): ICON_DATA_PATH / ICON_BASE_PATH are never set in this
+# script — presumably exported by `spack load icon-nwp` above; confirm on
+# the target platform.
+ln -sf "${ICON_DATA_PATH}/rrtmg_lw.nc" .
+ln -sf "${ICON_DATA_PATH}/ECHAM6_CldOptProps.nc" .
+ln -sf "${ICON_BASE_PATH}/run/ana_varnames_map_file.txt" .
+
+
+# Change permissions to read only.
+chmod 440 ./*
diff --git a/templates/real-from-dwd-ana/prepare_member.sh b/templates/real-from-dwd-ana/prepare_member.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ff7ebc135904287721217e1666d5fcb85a20bfd4
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_member.sh
@@ -0,0 +1,24 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+MEMBER=%MEMBER%
+
+# Common folder with data needed for all simulations
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+# Common folder for the same start date
+COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+# Member folder
+MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+# Create member folder and go there
+mkdir -p ${MEMBER_DIR}
+
+cd ${MEMBER_DIR} || exit
+
+
+# Link all files from the common inidata folder and the common date folder
+ln -sf ${COMMON_INIDATA_FOLDER}/* .
+ln -sf ${COMMON_DATE_FOLDER}/* .