diff --git a/conf/jobs_real-from-dwd-ana.yaml b/conf/real-from-dwd-ana/jobs.yaml
similarity index 95%
rename from conf/jobs_real-from-dwd-ana.yaml
rename to conf/real-from-dwd-ana/jobs.yaml
index 94c33cf5d02e01a0134ac05daa0102651ee030f9..ba4cfc84bd81dcfda2ed0849fe8fdc2a701d41bc 100644
--- a/conf/jobs_real-from-dwd-ana.yaml
+++ b/conf/real-from-dwd-ana/jobs.yaml
@@ -56,8 +56,13 @@ JOBS:
   ## Specify the path to the interpreter. If empty, use system default based on job type  . Default: empty
   # EXECUTABLE: /my_python_env/python3
 
+  TRANSFER_PROJECT:
+    FILE: templates/common/transfer_project.sh
+    PLATFORM: LOCAL
+
   BUILD_ICON:
     FILE: templates/common/build_icon.sh
+    DEPENDENCIES: TRANSFER_PROJECT
     WALLCLOCK: 04:00
     PROCESSORS: 16
 
@@ -68,10 +73,6 @@ JOBS:
     WALLCLOCK: 01:00
     PROCESSORS: 16
 
-  TRANSFER_NAMELISTS:
-    FILE: templates/common/transfer_namelists.sh
-    PLATFORM: LOCAL
-
   PREPARE_EXPERIMENT:
     FILE: templates/real-from-dwd-ana/prepare_experiment.sh
     DEPENDENCIES: BUILD_ICON
@@ -91,7 +92,7 @@ JOBS:
 
   PREPARE_CHUNK:
     FILE: templates/real-from-dwd-ana/prepare_chunk.py
-    DEPENDENCIES: TRANSFER_NAMELISTS BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
+    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
     WALLCLOCK: 00:05
     RUNNING: chunk
     TYPE: python
@@ -115,6 +116,7 @@ JOBS:
     EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
     PROCESSORS: 16
     MEMORY: 16384
+    WALLCLOCK: 01:00
 
   TRANSFER:
     FILE: templates/common/transfer.sh
diff --git a/conf/proj_real-from-dwd-ana.yaml b/conf/real-from-dwd-ana/proj.yaml
similarity index 88%
rename from conf/proj_real-from-dwd-ana.yaml
rename to conf/real-from-dwd-ana/proj.yaml
index 027bc9f5866e3e109fb08a063e4ff242f809274d..15528b240fc483832be08a57802851338bf51dc9 100644
--- a/conf/proj_real-from-dwd-ana.yaml
+++ b/conf/real-from-dwd-ana/proj.yaml
@@ -24,8 +24,8 @@ simulation:
   date_format: '%Y-%m-%dT%H:%M:%SZ'
   namelist_paths:
     # Path to the namelists
-    master: "%HPCROOTDIR%/namelists/icon_master_real-from-dwd-ana.namelist"
-    atmosphere: "%HPCROOTDIR%/namelists/icon_atmosphere_real-from-dwd-ana.namelist"
+    master: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_master.namelist"
+    atmosphere: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_atmosphere.namelist"
 
   # List of output file names that will be copied (Wildcards * allowed)
   output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
diff --git a/conf/real-from-ideal/jobs.yaml b/conf/real-from-ideal/jobs.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..63c0f201659f711ae5e56e17fcc723532408cf94
--- /dev/null
+++ b/conf/real-from-ideal/jobs.yaml
@@ -0,0 +1,163 @@
+# Example job with all options specified
+JOBS:
+  ## Job name
+  # JOBNAME:
+  ## Script to execute. If not specified, job will be omitted from workflow. "You can also specify additional files separated by a ",".
+  # Note: The post-processed additional_files will be sent to %HPCROOT%/LOG_%EXPID%
+  ## Path relative to the project directory
+  # FILE:
+  ## Platform to execute the job. If not specified, defaults to HPCARCH in expedf file.
+  ## LOCAL is always defined and refers to current machine
+  # PLATFORM:
+  ## Queue to add the job to. If not specified, uses PLATFORM default.
+  # QUEUE:
+  ## Defines dependencies from job as a list of parent jobs separated by spaces.
+  ## Dependencies to jobs in previous chunk, member or startdate, use -(DISTANCE)
+  # DEPENDENCIES:INI SIM-1 CLEAN-2
+  ## Define if job runs once, once per startdate, once per member or once per chunk. Options: once, date, member, chunk.
+  ## If not specified, defaults to once
+  # RUNNING:once
+  ## Specifies that job has only to be run after X dates, members or chunk. A job will always be created for the last
+  ## If not specified, defaults to 1
+  # FREQUENCY:3
+  ## On a job with FREQUENCY > 1, if True, the dependencies are evaluated against all
+  ## jobs in the frequency interval, otherwise only evaluate dependencies against current
+  ## iteration.
+  ## If not specified, defaults to True
+  # WAIT:False
+  ## Defines if job is only to be executed in reruns. If not specified, defaults to false.
+  # RERUN_ONLY:False
+  ## Wallclock to be submitted to the HPC queue in format HH:MM
+  # WALLCLOCK:00:05
+
+  ## Processors number to be submitted to the HPC. If not specified, defaults to 1.
+  ## Wallclock chunk increase (WALLCLOCK will be increased according to the formula WALLCLOCK + WCHUNKINC * (chunk - 1)).
+  ## Ideal for sequences of jobs that change their expected running time according to the current chunk.
+  # WCHUNKINC: 00:01
+  # PROCESSORS: 1
+  ## Threads number to be submitted to the HPC. If not specified, defaults to 1.
+  # THREADS: 1
+  ## Enables hyper-threading. If not specified, defaults to false.
+  # HYPERTHREADING: false
+  ## Tasks number to be submitted to the HPC. If not specified, defaults to 1.
+  # Tasks: 1
+  ## Memory requirements for the job in MB
+  # MEMORY: 4096
+  ##  Number of retrials if a job fails. If not specified, defaults to the value given on experiment's autosubmit.yml
+  # RETRIALS: 4
+  ##  Allows to put a delay between retries, of retrials if a job fails. If not specified, it will be static
+  # DELAY_RETRY_TIME: 11
+  # DELAY_RETRY_TIME: +11 # will wait 11,22,33,44...
+  # DELAY_RETRY_TIME: *11 # will wait 11,110,1110,11110...
+  ## Some jobs can not be checked before running previous jobs. Set this option to false if that is the case
+  # CHECK: False
+  ## Select the interpreter that will run the job. Options: bash, python, r Default: bash
+  # TYPE: bash
+  ## Specify the path to the interpreter. If empty, use system default based on job type  . Default: empty
+  # EXECUTABLE: /my_python_env/python3
+  TRANSFER_PROJECT:
+    FILE: templates/common/transfer_project.sh
+    PLATFORM: LOCAL
+
+  BUILD_ICON:
+    FILE: templates/common/build_icon.sh
+    DEPENDENCIES: TRANSFER_PROJECT
+    WALLCLOCK: 01:00
+    PROCESSORS: 16
+
+  BUILD_PYTHON_ENVIRONMENT:
+    FILE: templates/common/build_python_environment.sh
+    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
+    DEPENDENCIES: BUILD_ICON
+    WALLCLOCK: 01:00
+    PROCESSORS: 16
+
+  PREPARE_EXPERIMENT:
+    FILE: templates/real-from-ideal/prepare_experiment.sh
+    DEPENDENCIES: BUILD_ICON
+    RUNNING: once
+    WALLCLOCK: 00:10
+    PLATFORM: LOGIN
+
+  PREPARE_IDEAL_DIRECTORY:
+    FILE: templates/real-from-ideal/prepare_ideal_directory.sh
+    DEPENDENCIES: PREPARE_EXPERIMENT
+    RUNNING: date
+    WALLCLOCK: 00:10
+    PLATFORM: LOGIN
+
+  PREPARE_IDEAL_NAMELIST:
+    FILE: templates/real-from-ideal/prepare_ideal_namelist.py
+    DEPENDENCIES: PREPARE_IDEAL_DIRECTORY BUILD_PYTHON_ENVIRONMENT TRANSFER_PROJECT
+    RUNNING: date
+    WALLCLOCK: 00:10
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PLATFORM: LOGIN
+
+  RUN_IDEAL:
+    FILE: templates/real-from-ideal/run_ideal.sh
+    DEPENDENCIES: PREPARE_IDEAL_NAMELIST
+    RUNNING: date
+    WALLCLOCK: 01:00
+
+  EXTPAR_FROM_IDEALIZED:
+    FILE: templates/real-from-ideal/extpar_from_idealized.py
+    DEPENDENCIES: RUN_IDEAL
+    RUNNING: date
+    WALLCLOCK: 01:00
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  FG_ANA_FROM_IDEALIZED:
+    FILE: templates/real-from-ideal/fg_ana_from_idealized.py
+    DEPENDENCIES: RUN_IDEAL
+    RUNNING: date
+    WALLCLOCK: 01:00
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+
+  PREPARE_MEMBER:
+    FILE: templates/real-from-ideal/prepare_member.sh
+    DEPENDENCIES: FG_ANA_FROM_IDEALIZED EXTPAR_FROM_IDEALIZED
+    RUNNING: member
+    WALLCLOCK: 01:00
+    PLATFORM: LOGIN
+
+  PREPARE_CHUNK:
+    FILE: templates/real-from-ideal/prepare_chunk.py
+    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
+    WALLCLOCK: 00:05
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PLATFORM: LOGIN
+
+  RUN_ICON:
+    FILE: templates/common/run_icon.sh
+    DEPENDENCIES: PREPARE_CHUNK
+    WALLCLOCK: 01:00
+    RUNNING: chunk
+    PROCESSORS: 16
+
+  COMPRESS:
+    FILE: templates/common/compress.py
+    DEPENDENCIES: RUN_ICON COMPRESS-1 BUILD_PYTHON_ENVIRONMENT
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  TRANSFER:
+    FILE: templates/common/transfer.sh
+    DEPENDENCIES: COMPRESS
+    WALLCLOCK: 00:10
+    RUNNING: member
+    PLATFORM: LOCAL
+
+  CLEAN:
+    FILE: templates/common/clean.sh
+    DEPENDENCIES: TRANSFER
+    WALLCLOCK: 00:10
+    RUNNING: member
+    PLATFORM: LOGIN
diff --git a/conf/real-from-ideal/proj.yaml b/conf/real-from-ideal/proj.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5585b798f1cefefa62a105e20e7f0936328d212c
--- /dev/null
+++ b/conf/real-from-ideal/proj.yaml
@@ -0,0 +1,39 @@
+spack:
+  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
+  branch: lmu/ubuntu20.04-icon
+  compiler: gcc@11.3.0
+
+icon:
+  version: 2.6.5-nwp0
+
+python_environment:
+  # Name of the virtual environment in the remote platform experiment folder
+  folder_name: python_environment
+  requirements:
+    # Because there's an issue with numba, for now we need to keep a specific version of numpy
+    - numpy==1.23
+    - enstools-compression
+    # Just to try a library from a git repository.
+    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
+    - f90nml
+
+simulation:
+  dynamics_grid_filename: icon_grid_0012_R02B04_G.nc
+  radiation_grid_filename: icon_grid_0011_R02B03_R.nc
+  external_parameters_filename: extpar_DOM01.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_master.namelist"
+    atmosphere:
+      ideal: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_ideal.namelist"
+      real: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_real.namelist"
+
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "latbc_DOM01_ML_*.nc"
+  files_to_clean: "*.nc"
+
+data_management:
+  # Where do we put the output files afterwards?
+  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
diff --git a/namelists/icon_atmosphere_real-from-dwd-ana.namelist b/namelists/real-from-dwd-ana/icon_atmosphere.namelist
similarity index 100%
rename from namelists/icon_atmosphere_real-from-dwd-ana.namelist
rename to namelists/real-from-dwd-ana/icon_atmosphere.namelist
diff --git a/namelists/icon_master_real-from-dwd-ana.namelist b/namelists/real-from-dwd-ana/icon_master.namelist
similarity index 100%
rename from namelists/icon_master_real-from-dwd-ana.namelist
rename to namelists/real-from-dwd-ana/icon_master.namelist
diff --git a/namelists/real-from-ideal/icon_atmosphere_ideal.namelist b/namelists/real-from-ideal/icon_atmosphere_ideal.namelist
new file mode 100644
index 0000000000000000000000000000000000000000..094401dcf0df5a9d5e7132b43b1863ef99b6fd85
--- /dev/null
+++ b/namelists/real-from-ideal/icon_atmosphere_ideal.namelist
@@ -0,0 +1,69 @@
+&run_nml
+    ltestcase                   = .TRUE.        ! idealized testcase runs
+    dtime                       = 300           ! time step of 300 seconds
+    output                      = 'nml'         ! use output namelists
+    msg_level                   = 15
+    num_lev                     = 31
+    lvert_nest                  = .false.
+    ldynamics                   = .true.
+    ltransport                  = .true.
+    ntracer                     = 5
+    iforcing                    = 3
+/
+
+&grid_nml
+    dynamics_parent_grid_id     = 0
+    dynamics_grid_filename      = '%dynamics_grid_filename%'
+    radiation_grid_filename     = '%radiation_grid_filename%'
+    lredgrid_phys               = .true.
+/
+
+&nh_testcase_nml
+    nh_test_name                = 'APE_nwp'   ! testcase selection
+    ape_sst_case                = 'sst_qobs'
+/
+
+&nonhydrostatic_nml
+    damp_height                 = 18000
+    rayleigh_coeff              = 0.75
+/
+
+&nwp_phy_nml
+    lupatmo_phy = .FALSE.
+    inwp_surface                = 0
+
+/
+
+&time_nml
+    dt_restart = '%checkpoint_time%'
+/
+
+&io_nml
+    dt_checkpoint = '%checkpoint_time%'
+/
+
+&parallel_nml
+    nproma                      = 16
+/
+
+! the following two output files are used to initialize the next run
+&output_nml
+    file_interval               = 'PT3600S'
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_END_DATE%'
+    output_filename             = "init-test"
+    output_interval             = 'PT3600S'
+    include_last                = .true.
+    mode                        = 1
+    taxis_tunit                 = 1
+    ml_varlist                  = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
+/
+&output_nml
+    steps_per_file              = 1
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_START_DATE%'
+    output_filename             = "init-test-ext"
+    include_last                = .true.
+    output_interval             = 'PT3600S'
+    ml_varlist                  = 'depth_lk', 'emis_rad', 'fr_lake', 'fr_land', 'topography_c', 'soiltyp', 'sso_stdh', 'sso_theta', 'sso_gamma', 'sso_sigma'
+/
diff --git a/namelists/real-from-ideal/icon_atmosphere_real.namelist b/namelists/real-from-ideal/icon_atmosphere_real.namelist
new file mode 100644
index 0000000000000000000000000000000000000000..35be207e602d6306b5c5db74d9588fed9378bc8e
--- /dev/null
+++ b/namelists/real-from-ideal/icon_atmosphere_real.namelist
@@ -0,0 +1,74 @@
+&run_nml
+    ltestcase                   = .false.
+    dtime                       =  300
+    output                      = 'nml'
+    msg_level                   = 15
+    num_lev                     = 31
+    lvert_nest                  = .false.
+    ldynamics                   = .true.
+    ltransport                  = .true.
+    ntracer                     = 5
+    iforcing                    = 3
+/
+
+&grid_nml
+    dynamics_parent_grid_id     = 0
+    dynamics_grid_filename      = '%dynamics_grid_filename%'
+    radiation_grid_filename     = '%radiation_grid_filename%'
+    lredgrid_phys               = .true.
+/
+
+&extpar_nml
+    itopo                       = 1
+    extpar_filename             = 'extpar_DOM01.nc'
+/
+
+&initicon_nml
+    init_mode                   = 1,
+    dwdfg_filename              = 'init-test-fg_DOM01_ML_0001.nc'
+    dwdana_filename             = 'init-test-ana_DOM01_ML_0001.nc'
+    lconsistency_checks         = .false.
+/
+
+&nonhydrostatic_nml
+    damp_height                 = 18000
+    rayleigh_coeff              = 0.75
+/
+
+&parallel_nml
+    nproma                      = 16
+/
+
+! LATBC files
+&output_nml
+    file_interval               = 'PT3600S'
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_END_DATE%'
+    output_filename             = "latbc"
+    output_interval             = 'PT3600S'
+    include_last                = .true.
+    ml_varlist                  = 'u', 'v', 'w', 'theta_v', 'rho', 'qv', 'qc', 'qi', 'qr', 'qs', 'z_ifc'
+/
+
+! First Guess file
+&output_nml
+    file_interval               = 'PT3600S'
+    output_start                = '%Chunk_START_DATE%'
+    output_end                  = '%Chunk_START_DATE%'
+    output_filename             = "init"
+    output_interval             = 'PT3600S'
+    include_last                = .true.
+    ml_varlist                  = 'group:dwd_fg_atm_vars', 'group:dwd_fg_sfc_vars'
+/
+
+&time_nml
+    dt_restart = '%checkpoint_time%'
+/
+
+&io_nml
+    dt_checkpoint = '%checkpoint_time%'
+/
+
+&nwp_phy_nml
+    lupatmo_phy = .FALSE.
+/
\ No newline at end of file
diff --git a/namelists/real-from-ideal/icon_master.namelist b/namelists/real-from-ideal/icon_master.namelist
new file mode 100644
index 0000000000000000000000000000000000000000..fe401c4424536da7f88b7dd09bfc28b6d531b197
--- /dev/null
+++ b/namelists/real-from-ideal/icon_master.namelist
@@ -0,0 +1,16 @@
+&master_nml
+    lrestart                    = "%is_restart%"
+    lrestart_write_last         = .true.
+/
+
+&master_model_nml
+    model_type =                1                       ! atmospheric model
+    model_name =                "ATMO"                  ! name of this model component
+    model_namelist_filename =   "icon_atmosphere.namelist"
+/
+
+&master_time_control_nml
+    calendar                    = "proleptic gregorian"
+    experimentStartDate         = '%Chunk_START_DATE%'
+    experimentStopDate          = '%Chunk_END_DATE%'
+/
\ No newline at end of file
diff --git a/platforms/common/spack_utils.sh b/platforms/common/spack_utils.sh
new file mode 100644
index 0000000000000000000000000000000000000000..0650e618d850d432bd6f49962c5880d9807fb741
--- /dev/null
+++ b/platforms/common/spack_utils.sh
@@ -0,0 +1,27 @@
+function spack_env() {
+export SPACK_SETUP_ENV=spack/share/spack/setup-env.sh
+export SPACK_VENV=spack_icon_env
+export SPACK_USER_CACHE_PATH=${WORKDIR}/SPACK_USER_CACHE_PATH
+export SPACK_DISABLE_LOCAL_CONFIG=true
+}
+
+function install_spack() {
+spack_env
+
+#TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
+if [ ! -f ${SPACK_SETUP_ENV} ]; then
+  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
+fi
+
+. ${SPACK_SETUP_ENV}
+spack env create $SPACK_VENV
+spack env activate -p $SPACK_VENV
+spack compiler find
+}
+
+function load_spack() {
+spack_env
+if [ ! -f ${SPACK_SETUP_ENV} ]; then install_spack; fi
+. ${SPACK_SETUP_ENV}
+spack env activate -p $SPACK_VENV
+}
diff --git a/templates/common/build_icon.sh b/templates/common/build_icon.sh
index e1db87552109fb6b78a7b6cb1a300c1a33ab23bd..29442e138ca163b608b9fd294d2c414a036ada42 100644
--- a/templates/common/build_icon.sh
+++ b/templates/common/build_icon.sh
@@ -14,24 +14,28 @@ fi
 # Go to the working directory
 cd ${WORKDIR}
 
-# Check if experiment's spack installation already exists, if it doesn't, clone it.
-SPACK_ENV=spack/share/spack/setup-env.sh
-if [ ! -f ${SPACK_ENV} ]; then
-  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
-  #TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
-fi
-
-# Setup the environment
-source ${SPACK_ENV}
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack
 
 if [ $(
   spack find icon-nwp@${ICON_VERSION} &>/dev/null
   echo $?
 ) -ne 0 ]; then
   echo "Installing icon-nwp@${ICON_VERSION}."
-  # Compile openmpi with schedulers=slurm
-  spack install openmpi%${SPACK_COMPILER}+legacylaunchers schedulers=slurm
-  spack install icon-nwp@${ICON_VERSION}%${SPACK_COMPILER} ^openmpi%${SPACK_COMPILER}
+
+  if [[ $(spack compiler info ${SPACK_COMPILER}) ]]; then
+    echo "Found Compiler"
+  else
+    echo "could not find compiler, try to install it... this may take a while"
+    spack add ${SPACK_COMPILER}
+    spack install
+    spack compiler add $(spack location --install-dir $SPACK_COMPILER)
+  fi
+
+  spack add ucx%${SPACK_COMPILER}+dc+dm+ib_hw_tm+mlx5_dv+rc+rdmacm+thread_multiple+ud+verbs~cuda
+  spack add openmpi%${SPACK_COMPILER}+pmi+legacylaunchers~cuda schedulers=slurm fabrics=ucx
+  spack add icon-nwp@${ICON_VERSION}%${SPACK_COMPILER} ^openmpi%${SPACK_COMPILER}
+  spack install
 else
   echo "icon-nwp@${ICON_VERSION} already installed!"
 fi
diff --git a/templates/common/clean.sh b/templates/common/clean.sh
index 6b19b8dca823a5a26a52d4781955dcd9c27508d8..843123fb7bfd9bc8a55691ec527a0b8217ce3568 100644
--- a/templates/common/clean.sh
+++ b/templates/common/clean.sh
@@ -4,18 +4,19 @@
 WORKDIR=%HPCROOTDIR%
 STARTDATE=%SDATE%
 MEMBER=%MEMBER%
-FILES_TO_CLEAN="%simulation.FILES_TO_CLEAN%"
+FILES_TO_CLEAN='%simulation.FILES_TO_CLEAN%'
+echo "${FILES_TO_CLEAN}"
 
 # Define run directory
 RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
 
+cd ${RUNDIR} || exit
+# Remove the files in the run directory
+rm -f ${FILES_TO_CLEAN}
 
-# Copy the output files
-for file_name in ${FILES_TO_CLEAN}; do
-  rm ${RUNDIR}/${file_name}
-done
 
-# Remove the output files from the remote machine
+# Remove the files in the remote output directory
 OUTPUT_DIR=${WORKDIR}/output/${STARTDATE}/${MEMBER}
 
-rm ${OUTPUT_DIR}/*
\ No newline at end of file
+cd ${OUTPUT_DIR} || exit
+rm -f ${FILES_TO_CLEAN}
diff --git a/templates/common/compress.py b/templates/common/compress.py
index af0c18dc511704deee05a7f8a905d10dbc19ad3a..996370c2f14f7443200b88b8ae04f825d0603893 100644
--- a/templates/common/compress.py
+++ b/templates/common/compress.py
@@ -17,7 +17,7 @@ def compress_outputs():
     WORKDIR = Path("%HPCROOTDIR%")
     STARTDATE = "%SDATE%"
     MEMBER = "%MEMBER%"
-    output_file_names = "%simulation.OUTPUT_FILES%"
+    output_file_names = "%simulation.output_file_names%"
 
     # Define rundir
     RUNDIR = WORKDIR / STARTDATE / MEMBER
diff --git a/templates/common/transfer_namelists.sh b/templates/common/transfer_project.sh
similarity index 54%
rename from templates/common/transfer_namelists.sh
rename to templates/common/transfer_project.sh
index 0dbd9038b606d66f1d6d4f6afe06a3a7c515307f..f954d36cbabae54c4bb941df847ac77f195ac25d 100644
--- a/templates/common/transfer_namelists.sh
+++ b/templates/common/transfer_project.sh
@@ -7,7 +7,8 @@ HPCHOST=%HPCHOST%
 
 # Define local and remote namelists folders
 REMOTE_WORKDIR=${WORKDIR}/
-LOCAL_NAMELISTS_FOLDER="%PROJDIR%/namelists"
+PROJ_FOLDER="%PROJDIR%"
 
-# Transfer the namelists
-rsync -v -u -r --no-relative ${LOCAL_NAMELISTS_FOLDER} ${HPCUSER}@${HPCHOST}:${REMOTE_WORKDIR}
+# Transfer the project
+ssh ${HPCUSER}@${HPCHOST} mkdir -p ${REMOTE_WORKDIR}/proj
+rsync -v -u -r --no-relative ${PROJ_FOLDER}/ ${HPCUSER}@${HPCHOST}:${REMOTE_WORKDIR}/proj
diff --git a/templates/real-from-ideal/extpar_from_idealized.py b/templates/real-from-ideal/extpar_from_idealized.py
new file mode 100755
index 0000000000000000000000000000000000000000..17702784c82ca97613dfa345c5b758954bd6c023
--- /dev/null
+++ b/templates/real-from-ideal/extpar_from_idealized.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python3
+#################################################################################################################
+# I'm reusing an old script with a command line interface, so basically I'm providing the arguments here based on
+# some autosubmit variables
+from pathlib import Path
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/ideal")
+arguments = ["--const", f"{RUNDIR.as_posix()}/init-test-ext_DOM01_ML_0001.nc",
+             "--out", f"{RUNDIR.as_posix()}/extpar_DOM01.nc"]
+##################################################################################################################
+
+"""
+create an extpar file from an idealized modelrun in order to restart it as real-data case.
+"""
+
+import argparse
+import logging
+
+
+import xarray
+import numpy
+from numba import jit
+from enstools.io import read, write
+
+
+def uppercase_add_variables(extpar):
+    """
+    ICON wants to read uppercase variable names
+
+    Parameters
+    ----------
+    extpar: xarray.Dataset
+            the output dataset
+    """
+    names = list(extpar.variables)
+    lowercase_names = ["topography_c"]
+    mapping = {}
+    for name in names:
+        if name not in lowercase_names:
+            mapping[name] = name.upper()
+    extpar = extpar.rename(mapping)
+    return extpar
+
+
+def add_LU_CLASS_FRACTION(extpar, lu_class):
+    """
+    Add a constant land use class to all land grid points
+
+    Parameters
+    ----------
+    extpar: xarray.Dataset
+            the output dataset
+
+    lu_class: int
+            number of the landuse class to be used for all grid points on land
+
+    """
+    fractions = xarray.DataArray(numpy.zeros((23, extpar.dims["ncells"]), dtype=numpy.float32),
+                                 dims=("nclass_lu", "ncells"),
+                                 name="LU_CLASS_FRACTION",
+                                 attrs={"standard_name": "Landuse class fraction",
+                                        "long_name": "Fraction of land use classes in target grid element",
+                                        "CDI_grid_type": "unstructured"})
+
+    @jit(nopython=True)
+    def fill_fractions(fractions, lu_class, fr_land):
+        """
+        JIT-Compiled function that created the fractions
+
+        Parameters
+        ----------
+        fractions: numpy.ndarray
+        lu_class: int
+        fr_land: numpy.ndarray
+        """
+        for cell in range(fr_land.shape[0]):
+            if fr_land[cell] > 0:
+                fractions[lu_class][cell] = fr_land[cell]
+            # the remainder to one is always water
+            fractions[20][cell] = 1.0 - fractions[lu_class][cell]
+
+    fill_fractions(fractions.data, lu_class, extpar["FR_LAND"].values)
+
+    logging.info("added generated LU_CLASS_FRACTION to extpar.")
+    extpar["LU_CLASS_FRACTION"] = fractions
+
+
+def add_const(extpar, name, value, only_land=True, standard_name=None, long_name=None, units=None, dtype=numpy.float32,
+              monthly=False):
+    """
+    fill in constant values over land.
+    """
+    # variable is already there?
+    if name in extpar:
+        logging.info(f"{name} already in input file, no new variable create")
+        return
+
+    # create time-dimension for monthly values
+    if monthly and "time" not in extpar:
+        logging.info("adding time variable to extpar.")
+        time = xarray.DataArray(numpy.empty(12, dtype=numpy.float32),
+                                dims="time")
+        time.values[:] = numpy.arange(11110111, 11111311, 100)
+        extpar["time"] = time
+
+    # dimensions for new array
+    if monthly:
+        new_shape = (12, extpar.dims["ncells"])
+        new_dims = ("time", "ncells")
+    else:
+        new_shape = (extpar.dims["ncells"],)
+        new_dims = ("ncells",)
+
+    # create new array
+    new = xarray.DataArray(numpy.empty(new_shape, dtype=dtype),
+                           dims=new_dims,
+                           name=name,
+                           attrs={"CDI_grid_type": "unstructured"})
+    if standard_name is not None:
+        new.attrs["standard_name"] = standard_name
+    if long_name is not None:
+        new.attrs["long_name"] = long_name
+    if units is not None:
+        new.attrs["units"] = units
+
+    # fill in values
+    if only_land:
+        if monthly:
+            for m in range(12):
+                new.values[m, :] = numpy.where(extpar["FR_LAND"] == 0, 0, value)
+        else:
+            new.values[:] = numpy.where(extpar["FR_LAND"] == 0, 0, value)
+    else:
+        new.values[:] = value
+
+    logging.info(f"added generated {name} to extpar.")
+    extpar[name] = new
+
+
+def convert_SOILTYP(extpar):
+    """
+    SOILTYPE is supposed to be an 32bit Integer
+
+    Parameters
+    ----------
+    extpar: xarray.Dataset
+            the output dataset
+    """
+    if extpar["SOILTYP"].dtype == numpy.int32:
+        logging.info("SOILTYP has already data type integer.")
+
+    soiltyp = xarray.DataArray(extpar["SOILTYP"].values.astype(numpy.int32),
+                               dims=("ncells"),
+                               name="SOILTYP",
+                               attrs={"standard_name": "soil_type",
+                                      "long_name": "soil type"})
+
+    extpar["SOILTYP"] = soiltyp
+    logging.info("changed datatype of soiltype to int32.")
+
+
+def remove_unsed(extpar):
+    """
+    remove unused variables from the file
+
+    Parameters
+    ----------
+    extpar
+    """
+    unused = ["CLON", "CLAT", "CLON_BNDS", "CLAT_BNDS"]
+    for var in unused:
+        if var in extpar:
+            del extpar[var]
+            logging.info(f"removed unsed variable {var} from extpar")
+
+
+def copy_uuid(extpar, grid_file):
+    """
+    read the uuid argument from the grid file
+
+    Parameters
+    ----------
+    extpar: xarray.Dataset
+    grid_file
+    """
+    attrs_to_copy = ["uuidOfHGrid", "grid_file_uri", "number_of_grid_used", "uuidOfParHGrid", "ICON_grid_file_uri"]
+    grid = read(grid_file)
+    for attr in attrs_to_copy:
+        if attr in grid.attrs:
+            extpar.attrs[attr] = grid.attrs[attr]
+
+
+if __name__ == "__main__":
+    # parse command line arguments
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("--const", required=True, help="""file with constant variables written ba a previous icon run. 
+                                                       Expected are: 'depth_lk', 'emis_rad', 'fr_lake', 'fr_land', 
+                                                       'topography_c', 'soiltyp', 'sso_stdh', 'sso_theta', 'sso_gamma', 
+                                                       'sso_sigma'.""")
+    parser.add_argument("--lu-class", type=int, default=0, help="""Land use class used for land grid points. 
+                                           If not given, all gridpoints are set to class 0""")
+    parser.add_argument("--ndvi-max", type=float, default=0.5, help="""NDVI_MAX for Land-Grid-points.""")
+    parser.add_argument("--grid-file", required=False, help="if given, the UUID is read from this file.")
+    parser.add_argument("--out", required=True, help="Name of the output file")
+    args = parser.parse_args(arguments)
+
+    # read const input file
+    extpar = read(args.const)
+    extpar = uppercase_add_variables(extpar)
+
+    # create variable LU_CLASS_FRACTION
+    add_LU_CLASS_FRACTION(extpar, args.lu_class)
+
+    # change datatype of SOILTYP
+    convert_SOILTYP(extpar)
+
+    # add constant values over land for land use and type related variables
+    add_const(extpar, "NDVI_MAX", args.ndvi_max,
+              standard_name="normalized_difference_vegetation_index",
+              long_name="Constant NDVI over land")
+    add_const(extpar, "T_CL", 287.0,
+              standard_name="soil_temperature",
+              long_name="Constant Values for Soil temperature")
+    add_const(extpar, "PLCOV_MX", 0.6,
+              standard_name="vegetation_area_fraction_vegetation_period",
+              long_name="Constant Values for Plant cover maximum due to land use data")
+    add_const(extpar, "LAI_MX", 2.73,
+              standard_name="leaf_area_index_vegetation_period",
+              long_name="Constant Values for Leaf Area Index Maximum")
+    add_const(extpar, "ROOTDP", 0.73,
+              standard_name="root_depth",
+              long_name="Constant Values for Root depth",
+              units="m")
+    add_const(extpar, "RSMIN", 215.0,
+              standard_name="RSMIN",
+              long_name="Constant Values for Minimal stomata resistence",
+              units="s/m")
+    add_const(extpar, "FOR_D", 0.06,
+              standard_name="fraction_of_deciduous_forest_cover",
+              long_name="Constant values for Fraction of deciduous forest")
+    add_const(extpar, "FOR_E", 0.15,
+              standard_name="fraction_of_evergreen_forest_cover",
+              long_name="Constant values for Fraction of evergreen forest")
+    add_const(extpar, "ICE", 0.1,
+              standard_name="Ice fraction",
+              long_name="Constant values for Ice fraction due to Land Use Data")
+    add_const(extpar, "NDVI_MRAT", 0.78,
+              standard_name="normalized_difference_vegetation_index",
+              long_name="Constant values for (monthly) proportion of actual value/maximum normalized differential vegetation index",
+              monthly=True)
+    # add_const(extpar, "", 0.6,
+    #          standard_name="",
+    #          long_name="")
+
+    # copy uuid from grid
+    if args.grid_file is not None:
+        copy_uuid(extpar, args.grid_file)
+
+    # write extpar-file
+    remove_unsed(extpar)
+    logging.info(f"writing {args.out}...")
+    # Icon requires the attribute "rawdata" to be set to either "GLC2000" or "GLOBCOVER2009"
+    # It was discussed in the following issue: https://gitlab.physik.uni-muenchen.de/w2w/icon-examples/-/issues/2
+    extpar.attrs["rawdata"] = "GLC2000"
+    write(extpar, args.out)
+    # extpar.to_netcdf(args.out)
diff --git a/templates/real-from-ideal/fg_ana_from_idealized.py b/templates/real-from-ideal/fg_ana_from_idealized.py
new file mode 100755
index 0000000000000000000000000000000000000000..29b5d0963a9e928b47173793cf90ea926c9679d8
--- /dev/null
+++ b/templates/real-from-ideal/fg_ana_from_idealized.py
@@ -0,0 +1,236 @@
+#!/usr/bin/env python3
+#################################################################################################################
+# I'm reusing an old script with a command line interface, so basically I'm providing the arguments here based on
+# some autosubmit variables
+from pathlib import Path
+
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/ideal")
+arguments = ["--input", f"{RUNDIR.as_posix()}/init-test_DOM01_ML_0001.nc",
+             "--out-fg", f"{RUNDIR.as_posix()}/init-test-fg_DOM01_ML_0001.nc",
+             "--out-ana", f"{RUNDIR.as_posix()}/init-test-ana_DOM01_ML_0001.nc",
+             ]
+
+##################################################################################################################
+
+"""
+Add missing fields to a FG file
+"""
+import argparse
+import logging
+import xarray
+import numpy
+from enstools.io import read, write
+
+
+def add_variable(fg, name, value=0, standard_name=None, long_name=None, dtype=numpy.float64, units=None, param=None,
+                 over_land=False):
+    """
+
+    Parameters
+    ----------
+    value: float or list of floats
+            value to assign to a variable
+
+    fg: xarray.Dataset
+            full content of the file written by ICON
+
+    name: str
+            name of the variable to create
+
+    standard_name: str
+    long_name: str
+    """
+    # variable is already there?
+    if name in fg:
+        logging.info(f"{name} already in input file, no new variable create")
+        return
+
+    # construct the shape based on 2D or not
+    if not isinstance(value, list):
+        new_shape = (fg.dims["time"], fg["fr_land"].shape[0])
+        new_dims = ("time", fg["fr_land"].dims[0])
+    else:
+        if name == "w_so" or name == "w_so_ice":
+            # create vertical coordinate
+            if "depth_2" not in fg.coords:
+                depth_2 = xarray.DataArray(numpy.asarray([5.0, 20.0, 60.0, 180.0, 540.0, 1620.0, 4860.0, 14580.0]),
+                                           attrs={"units": "mm",
+                                                  "long_name": "depth_below_land",
+                                                  "axis": "Z",
+                                                  "bounds": "depth_2_bnds"},
+                                           name="depth_2",
+                                           dims="depth_2")
+                depth_2_bnds = xarray.DataArray(numpy.asarray([[0, 10],
+                                                               [10, 30],
+                                                               [30, 90],
+                                                               [90, 270],
+                                                               [270, 810],
+                                                               [810, 2430],
+                                                               [2430, 7290],
+                                                               [7290, 21870]], dtype=numpy.float64),
+                                                name="depth_2_bnds",
+                                                dims=("depth_2", "bnds"))
+                fg["depth_2_bnds"] = depth_2_bnds
+                fg.coords["depth_2"] = depth_2
+                logging.info("added coordinate depth_2 to first guess.")
+            new_shape = (fg.dims["time"], fg.dims["depth_2"], fg["fr_land"].shape[0])
+            new_dims = ("time", "depth_2", fg["fr_land"].dims[0])
+            if fg.dims["depth_2"] != len(value):
+                logging.error(f"wrong number of values given for {name}: {len(value)} instead of {fg.dims['depth_2']}")
+
+        elif name == "t_so":
+            if "depth" not in fg.coords:
+                depth = xarray.DataArray(numpy.asarray([0.0, 5.0, 20.0, 60.0, 180.0, 540.0, 1620.0, 4860.0, 14580.0]),
+                                         attrs={"units": "mm",
+                                                "long_name": "depth_below_land",
+                                                "axis": "Z",
+                                                "bounds": "depth_2_bnds"},
+                                         name="depth",
+                                         dims="depth")
+                fg.coords["depth"] = depth
+                logging.info("added coordinate depth to first guess.")
+            new_shape = (fg.dims["time"], fg.dims["depth"], fg["fr_land"].shape[0])
+            new_dims = ("time", "depth", fg["fr_land"].dims[0])
+            if fg.dims["depth"] != len(value):
+                logging.error(f"wrong number of values given for {name}: {len(value)} instead of {fg.dims['depth']}")
+
+        else:
+            logging.error(f"unsupported 3d variable: {name}")
+            exit(-1)
+
+    # create new array
+    new = xarray.DataArray(numpy.empty(new_shape, dtype=dtype),
+                           dims=new_dims,
+                           name=name,
+                           attrs={"CDI_grid_type": "unstructured"})
+    if standard_name is not None:
+        new.attrs["standard_name"] = standard_name
+    if long_name is not None:
+        new.attrs["long_name"] = long_name
+    if units is not None:
+        new.attrs["units"] = units
+    if param is not None:
+        new.attrs["param"] = param
+    new.attrs["number_of_grid_in_reference"] = fg["w"].attrs["number_of_grid_in_reference"]
+    if not isinstance(value, list):
+        if isinstance(value, str):
+            new.values[:] = fg[value].values
+        else:
+            if over_land:
+                new.values[:] = numpy.where(fg["fr_land"] == 0, 0, value)
+            else:
+                new.values[:] = value
+    else:
+        for ivalue, one_value in enumerate(value):
+            if over_land:
+                new.values[:, ivalue, :] = numpy.where(fg["fr_land"] == 0, 0, one_value)
+            else:
+                new.values[:, ivalue, :] = one_value
+
+    logging.info(f"added generated {name} to first guess.")
+    fg[name] = new
+
+
+def write_output(input, fg_name, ana_name):
+    """
+    ICON expects some variables from the FG file and other from ANA. Here we split up the input
+
+    Parameters
+    ----------
+    input: xarray.Dataset
+    fg_name: path of the output file receiving the first-guess variables
+    ana_name: path of the output file receiving the analysis variables
+    """
+    logging.info("splitting input in FG and ANA")
+    ana_variables = ["pres", "temp", "qv", "u", "v", "freshsnow", "fr_ice", "h_ice",
+                     "h_snow", "t_ice", "t_seasfc", "t_snow", "w_so"]
+
+    # create a copy of the input dataset; all non-ana variables will be removed from it
+    ana = input.copy()
+
+    # loop over all variables: remove ana variables from fg and fg variables from ana
+    vars = list(input.variables)
+    for var in vars:
+        if var in ana_variables:
+            del input[var]
+        elif var not in input.coords and not var.startswith("height") and not var.startswith("depth"):
+            del ana[var]
+
+    # write the output
+    logging.info(f"writting {fg_name}")
+    write(input, fg_name)
+    logging.info(f"writting {ana_name}")
+    write(ana, ana_name)
+
+
+if __name__ == "__main__":
+    # parse command line arguments
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("--input", required=True, help="ICON output with almost all FG variables")
+    parser.add_argument("--out-fg", required=True, help="output file for first guess variables.")
+    parser.add_argument("--out-ana", required=True, help="output file for analysis variables.")
+
+    args = parser.parse_args(arguments)
+
+    # read input file
+    input = read(args.input)
+
+    # add the surface/soil variables that the idealized run did not produce
+    add_variable(input, "t_seasfc",
+                 standard_name="t_seasfc",
+                 long_name="Sea surface temperature initialized by 2m temparature",
+                 param="0.3.10",
+                 value="t_g")
+    add_variable(input, "freshsnow",
+                 standard_name="freshsnow",
+                 long_name="weighted indicator for age of snow in top of snow layer",
+                 param="203.1.0")
+    add_variable(input, "w_snow",
+                 standard_name="w_snow",
+                 long_name="weighted water equivalent of snow",
+                 param="60.1.0")
+    add_variable(input, "t_snow",
+                 standard_name="t_snow",
+                 long_name="weighted temperature of the snow-surface",
+                 param="18.0.0")
+    add_variable(input, "h_snow",
+                 standard_name="h_snow",
+                 long_name="snow depth",
+                 param="11.1.0")
+    add_variable(input, "rho_snow",
+                 standard_name="rho_snow",
+                 long_name="weighted snow density",
+                 param="61.1.0")
+    add_variable(input, "w_i",
+                 standard_name="w_i",
+                 long_name="weighted water content of interception water",
+                 param="13.0.2")
+    add_variable(input, "w_so",
+                 standard_name="w_so",
+                 long_name="total water content (ice + liquid water)",
+                 param="13.0.2",
+                 value=[100, 102, 107, 125, 176, 330, 990, 2900],
+                 units="kg m-2",
+                 over_land=True)
+    add_variable(input, "w_so_ice",
+                 standard_name="w_so",
+                 long_name="total water content (ice + liquid water)",
+                 param="22.3.2",
+                 value=[0, 0, 0, 0, 0, 0, 0, 0],
+                 units="kg m-2",
+                 over_land=True)
+    add_variable(input, "t_so",
+                 standard_name="t_so",
+                 long_name="weighted soil temperature (main level)",
+                 param="18.3.2",
+                 value=[280, 280, 280, 280, 280, 281, 283, 283, 283],
+                 units="K",
+                 over_land=True)
+
+    # write the output file
+    write_output(input, args.out_fg, args.out_ana)
+    # logging.info(f"writting {args.out}")
+    # write(fg, args.out)
diff --git a/templates/real-from-ideal/prepare_chunk.py b/templates/real-from-ideal/prepare_chunk.py
new file mode 100644
index 0000000000000000000000000000000000000000..2322efdc917c6fd803b12884d40373807aa106e4
--- /dev/null
+++ b/templates/real-from-ideal/prepare_chunk.py
@@ -0,0 +1,92 @@
+import logging
+import re
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import f90nml
+
+logger = logging.getLogger("prepare_chunk")
+logger.setLevel(logging.INFO)
+
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+MEMBER = "%MEMBER%"
+CHUNK = "%CHUNK%"
+
+# Example of date format "2018-06-01T00:00:00Z"
+date_format = "%simulation.date_format%"
+
+START_YEAR = "%Chunk_START_YEAR%"
+START_MONTH = "%Chunk_START_MONTH%"
+START_DAY = "%Chunk_START_DAY%"
+START_HOUR = "%Chunk_START_HOUR%"
+
+END_YEAR = "%Chunk_END_YEAR%"
+END_MONTH = "%Chunk_END_MONTH%"
+END_DAY = "%Chunk_END_DAY%"
+END_HOUR = "%Chunk_END_HOUR%"
+
+Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
+Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))
+
+# Compute difference in seconds
+checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())
+
+# TODO: Is that really necessary?
+# Add 10 minutes to allow the model to write the restarts
+Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)
+# Get run directory
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/{MEMBER}")
+
+# TODO: This is a bit ugly
+
+# Get some variable replacements from the proj.yml file through autosubmit
+variable_replacements = {
+    "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
+    "radiation_grid_filename": "%simulation.radiation_grid_filename%",
+    "external_parameters_filename": "%simulation.external_parameters_filename%",
+    "first_guess_filename": "init-test-fg_DOM01_ML_0001.nc",
+    "analysis_filename": "init-test-ana_DOM01_ML_0001.nc",
+    "Chunk_START_DATE": Chunk_START_DATE.strftime(date_format),
+    "Chunk_END_DATE": Chunk_END_DATE.strftime(date_format),
+    "is_restart": False if "%CHUNK%" == "1" else True,
+    "checkpoint_time": checkpoint_time,
+}
+
+
+def adapt_namelist(input_namelist: str, output_namelist: str):
+    input_namelist = Path(input_namelist)
+    output_namelist = Path(output_namelist)
+
+    namelist = f90nml.read(input_namelist.as_posix())
+    group_keys = [gk for gk in namelist]
+
+    for group in group_keys:
+        variable_keys = [vk for vk in namelist[group]]
+        for variable in variable_keys:
+            value = namelist[group][variable]
+            m = re.match(r"%(.*)%", str(value))
+            if m:
+                key = m.group(1)
+
+                if key not in variable_replacements:
+                    raise AssertionError(f"The namelist {input_namelist.as_posix()!r} contains the variable {key!r} "
+                                         f"which is not in the list of provided replacements:\n"
+                                         f"{[v for v in variable_replacements]}")
+                logger.info(f"Replacing {group}>{variable}:{key} with {variable_replacements[key]!r}")
+                namelist[group][variable] = variable_replacements[key]
+
+    f90nml.write(nml=namelist, nml_path=output_namelist.as_posix(), force=True)
+
+
+if __name__ == '__main__':
+    atmosphere_namelist_path = "%simulation.namelist_paths.atmosphere.real%"
+    master_namelist_path = "%simulation.namelist_paths.master%"
+
+    # Adapt atmosphere namelist
+    adapt_namelist(input_namelist=atmosphere_namelist_path,
+                   output_namelist=(RUNDIR / "icon_atmosphere.namelist").as_posix())
+    # Adapt master namelist
+    adapt_namelist(input_namelist=master_namelist_path,
+                   output_namelist=(RUNDIR / "icon_master.namelist").as_posix())
diff --git a/templates/real-from-ideal/prepare_experiment.sh b/templates/real-from-ideal/prepare_experiment.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b25b40a9fa6eeca08222eb7fb8e40f15c7bf2901
--- /dev/null
+++ b/templates/real-from-ideal/prepare_experiment.sh
@@ -0,0 +1,40 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
+RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
+
+# Activate spack
+SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
+source ${SPACK_ENV}
+
+# Load icon module needed to retrieve some data
+spack load icon-nwp@%ICON_VERSION%
+
+# Create a folder for the common inidata and go there
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+mkdir -p "${COMMON_INIDATA_FOLDER}"
+cd "${COMMON_INIDATA_FOLDER}" || exit
+
+# Download or copy required input files
+function download_file() {
+  URL=$1
+  FILE=${2:-$(basename "$URL")}
+  if [ ! -e "$FILE" ]; then
+    echo "Download $URL => $FILE"
+    wget -q "$URL" -O "$FILE"
+  fi
+}
+
+# Download grid files and external parameters
+BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
+download_file $BASEURL/${DYNAMICS_GRID_FILENAME}
+download_file $BASEURL/${RADIATION_GRID_FILE}
+
+# Link input for radiation
+ln -sf "${ICON_DATA_PATH}/rrtmg_lw.nc" .
+ln -sf "${ICON_DATA_PATH}/ECHAM6_CldOptProps.nc" .
+
+# Change permissions to read only.
+chmod 440 ./*
diff --git a/templates/real-from-ideal/prepare_ideal_directory.sh b/templates/real-from-ideal/prepare_ideal_directory.sh
new file mode 100644
index 0000000000000000000000000000000000000000..38631ac7fbe75951734aafc22479d96eecef2de0
--- /dev/null
+++ b/templates/real-from-ideal/prepare_ideal_directory.sh
@@ -0,0 +1,18 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+
+# Common folder with data needed for all simulations
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+
+# Ideal folder
+IDEAL_DIR=${WORKDIR}/${STARTDATE}/ideal
+
+# Create member folder and go there
+mkdir -p ${IDEAL_DIR}
+cd ${IDEAL_DIR} || exit
+
+# Link all files from the common inidata folder
+ln -sf ${COMMON_INIDATA_FOLDER}/* .
diff --git a/templates/real-from-ideal/prepare_ideal_namelist.py b/templates/real-from-ideal/prepare_ideal_namelist.py
new file mode 100644
index 0000000000000000000000000000000000000000..e05e8d681ec489819930deabbbbd6804a6da065f
--- /dev/null
+++ b/templates/real-from-ideal/prepare_ideal_namelist.py
@@ -0,0 +1,86 @@
+import logging
+import re
+from datetime import datetime, timedelta
+from pathlib import Path
+
+import f90nml
+
+logger = logging.getLogger("prepare_chunk")
+logger.setLevel(logging.INFO)
+
+# Get some autosubmit variables
+WORKDIR = "%HPCROOTDIR%"
+STARTDATE = "%SDATE%"
+
+# Example of date format "2018-06-01T00:00:00Z"
+date_format = "%simulation.date_format%"
+
+START_YEAR = "%Chunk_START_YEAR%"
+START_MONTH = "%Chunk_START_MONTH%"
+START_DAY = "%Chunk_START_DAY%"
+START_HOUR = "%Chunk_START_HOUR%"
+
+END_YEAR = "%Chunk_END_YEAR%"
+END_MONTH = "%Chunk_END_MONTH%"
+END_DAY = "%Chunk_END_DAY%"
+END_HOUR = "%Chunk_END_HOUR%"
+
+Chunk_START_DATE = datetime(year=int(START_YEAR), month=int(START_MONTH), day=int(START_DAY), hour=int(START_HOUR))
+Chunk_END_DATE = datetime(year=int(END_YEAR), month=int(END_MONTH), day=int(END_DAY), hour=int(END_HOUR))
+
+# Compute difference in seconds
+checkpoint_time = int((Chunk_END_DATE - Chunk_START_DATE).total_seconds())
+
+# TODO: Is that really necessary?
+# Add 10 minutes to allow the model to write the restarts
+Chunk_END_DATE = Chunk_END_DATE + timedelta(minutes=10)
+# Get run directory
+RUNDIR = Path(f"{WORKDIR}/{STARTDATE}/ideal")
+
+# Get some variable replacements from the proj.yml file through autosubmit
+variable_replacements = {
+    "dynamics_grid_filename": "%simulation.dynamics_grid_filename%",
+    "radiation_grid_filename": "%simulation.radiation_grid_filename%",
+    "external_parameters_filename": "%simulation.external_parameters_filename%",
+    "Chunk_START_DATE": Chunk_START_DATE.strftime(date_format),
+    "Chunk_END_DATE": Chunk_END_DATE.strftime(date_format),
+    "is_restart": False if "%CHUNK%" == "1" else True,
+    "checkpoint_time": checkpoint_time,
+}
+
+
+def adapt_namelist(input_namelist: str, output_namelist: str):
+    input_namelist = Path(input_namelist)
+    output_namelist = Path(output_namelist)
+
+    namelist = f90nml.read(input_namelist.as_posix())
+    group_keys = [gk for gk in namelist]
+
+    for group in group_keys:
+        variable_keys = [vk for vk in namelist[group]]
+        for variable in variable_keys:
+            value = namelist[group][variable]
+            m = re.match(r"%(.*)%", str(value))
+            if m:
+                key = m.group(1)
+
+                if key not in variable_replacements:
+                    raise AssertionError(f"The namelist {input_namelist.as_posix()!r} contains the variable {key!r} "
+                                         f"which is not in the list of provided replacements:\n"
+                                         f"{[v for v in variable_replacements]}")
+                logger.info(f"Replacing {group}>{variable}:{key} with {variable_replacements[key]!r}")
+                namelist[group][variable] = variable_replacements[key]
+
+    f90nml.write(nml=namelist, nml_path=output_namelist.as_posix(), force=True)
+
+
+if __name__ == '__main__':
+    atmosphere_namelist_path = "%simulation.namelist_paths.atmosphere.ideal%"
+    master_namelist_path = "%simulation.namelist_paths.master%"
+
+    # Adapt atmosphere namelist
+    adapt_namelist(input_namelist=atmosphere_namelist_path,
+                   output_namelist=(RUNDIR / "icon_atmosphere.namelist").as_posix())
+    # Adapt master namelist
+    adapt_namelist(input_namelist=master_namelist_path,
+                   output_namelist=(RUNDIR / "icon_master.namelist").as_posix())
diff --git a/templates/real-from-ideal/prepare_member.sh b/templates/real-from-ideal/prepare_member.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4ff92170dcd983c428bc37f8cbcf055bc033b380
--- /dev/null
+++ b/templates/real-from-ideal/prepare_member.sh
@@ -0,0 +1,26 @@
+#!/bin/bash -l
+
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+STARTDATE=%SDATE%
+MEMBER=%MEMBER%
+
+# Common folder with data needed for all simulations
+COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
+
+# Member folder
+MEMBER_DIR=${WORKDIR}/${STARTDATE}/${MEMBER}
+
+# Create member folder and go there
+mkdir -p ${MEMBER_DIR}
+
+cd ${MEMBER_DIR} || exit
+
+
+# Link all files from the common inidata folder
+ln -sf ${COMMON_INIDATA_FOLDER}/* .
+
+# Link files from the ideal run.
+ln -sf ../ideal/extpar_DOM01.nc .
+ln -sf ../ideal/init-test-fg_DOM01_ML_0001.nc
+ln -sf ../ideal/init-test-ana_DOM01_ML_0001.nc
diff --git a/templates/real-from-ideal/run_ideal.sh b/templates/real-from-ideal/run_ideal.sh
new file mode 100644
index 0000000000000000000000000000000000000000..cec0251755487445e1b3a510023a5856d4912e88
--- /dev/null
+++ b/templates/real-from-ideal/run_ideal.sh
@@ -0,0 +1,23 @@
+# Get some variables provided by autosubmit.
+WORKDIR=%HPCROOTDIR%
+ICON_VERSION=%ICON_VERSION%
+
+STARTDATE=%SDATE%
+
+# Define rundir
+RUNDIR=${WORKDIR}/${STARTDATE}/ideal
+
+# Go to the ideal rundir
+cd ${RUNDIR}
+
+# Activate spack
+SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
+source ${SPACK_ENV}
+# Load icon module
+spack load icon-nwp@%ICON_VERSION%
+
+# Set environment variable for eccodes-dwd definitions:
+source ${WORKDIR}/eccodes_defs.env
+
+# Run icon
+srun icon