diff --git a/conf/common/build.yml b/conf/common/build.yml
new file mode 100644
index 0000000000000000000000000000000000000000..9875df32417fb2fc99f5dd38a437bc3945d3eccf
--- /dev/null
+++ b/conf/common/build.yml
@@ -0,0 +1,33 @@
+spack:
+  init: ""                                                # command to load spack environment, e.g. module load spack,
+                                                          # use spack/setup-env.sh if empty
+  url: https://github.com/spack/spack.git                 # url to download spack if necessary
+  branch: develop                                         # if downloaded, branch name to use
+  externals: ""                                           # list of packages we try to find with spack external find
+                                                          # At LMU we need to include slurm in this list.
+  compiler: "gcc@12.2.0"                                  # desired compiler for spack
+  root: "%HPCROOTDIR%/spack"                              # path to a spack install, will be downloaded to if not present
+  user_cache_path: "%HPCROOTDIR%/spack_user_cache_path"   # spack puts data here when bootstrapping, leave empty to use home folder
+  user_config_path: "%HPCROOTDIR%/spack_user_config_path" # spack puts data here when bootstrapping, leave empty to use home folder
+  disable_local_config: false                             # if true, spack installs into spack source dir
+
+icon:
+  # The command that will be used to build icon-nwp with spack
+  build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER%"
+  # In LRZ we used the following command to point to a specific version of openmpi:
+  # build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER% ^openmpi/amct7nx"
+  # In LMU we used the following command to build a version of openmpi that works with slurm:
+  # build_cmd: "icon-nwp@%ICON.VERSION%% %SPACK.COMPILER% ^openmpi+pmi+legacylaunchers schedulers=slurm fabrics=ucx ucx+dc+dm+ib_hw_tm+mlx5_dv+rc+rdmacm+thread_multiple+ud+verbs"
+  version: master # The latest release at the moment of creating this file was 2.6.5-nwp0
+
+python_environment:
+  # Name of the virtual environment in the remote platform experiment folder
+  folder_name: python_environment
+  python_version: "3.8:"   # In spack notation use x: to indicate at least x
+  requirements:
+    # Because there's an issue with numba, for now we need to keep a specific version of numpy
+    - numpy==1.23
+    # Using enstools-compression to compress the outputs.
+    - enstools-compression
+    # We are using the f90nml library to manipulate name lists.
+    - f90nml
diff --git a/conf/common/data_management.yml b/conf/common/data_management.yml
new file mode 100644
index 0000000000000000000000000000000000000000..c76fafea497d047f80bad9be3ed304e23014c7ae
--- /dev/null
+++ b/conf/common/data_management.yml
@@ -0,0 +1,3 @@
+data_management:
+  # Where do we put the output files afterwards?
+  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
diff --git a/conf/common/expdef.yml b/conf/common/expdef.yml
new file mode 100644
index 0000000000000000000000000000000000000000..6915db6f518af7ba566980711e723312317af7df
--- /dev/null
+++ b/conf/common/expdef.yml
@@ -0,0 +1,8 @@
+experiment:
+  DATELIST: 20201001
+  MEMBERS: "fc0"
+  CHUNKSIZEUNIT: hour
+  CHUNKSIZE: 12
+  NUMCHUNKS: 2
+  CHUNKINI: 0
+  CALENDAR: standard
\ No newline at end of file
diff --git a/conf/common/platforms.yml b/conf/common/platforms.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a1ebe8ee80a73fa569066038761f60fab61f36ba
--- /dev/null
+++ b/conf/common/platforms.yml
@@ -0,0 +1,54 @@
+UserInformation:
+  LMU:
+    user: FILL_USER
+    host: FILL_HOST
+    project: FILL_PROJECT
+    scratch: FILL_SCRATCH
+  LRZ:
+    user: FILL_USER
+    host: FILL_HOST
+    project: FILL_PROJECT
+    scratch: FILL_SCRATCH
+
+Platforms:
+  LMU:
+    TYPE: slurm
+    HOST: "%UserInformation.LMU.host%"
+    PROJECT: "%UserInformation.LMU.project%"
+    USER: "%UserInformation.LMU.user%"
+    SCRATCH_DIR: "%UserInformation.LMU.scratch%"
+    ADD_PROJECT_TO_HOST: False
+    MAX_WALLCLOCK: '48:00'
+    TEMP_DIR: ''
+    CUSTOM_DIRECTIVES: "#SBATCH --export=ALL,OMPI_MCA_btl_tcp_if_include=10.0.0.0/8"
+
+  LMU_LOGIN:
+    TYPE: ps
+    HOST: "%UserInformation.LMU.host%"
+    PROJECT: "%UserInformation.LMU.project%"
+    USER: "%UserInformation.LMU.user%"
+    SCRATCH_DIR: "%UserInformation.LMU.scratch%"
+    ADD_PROJECT_TO_HOST: False
+    MAX_WALLCLOCK: '48:00'
+    TEMP_DIR: ''
+
+  LRZ:
+    TYPE: slurm
+    HOST: "%UserInformation.LRZ.host%"
+    PROJECT: "%UserInformation.LRZ.project%"
+    USER: "%UserInformation.LRZ.user%"
+    SCRATCH_DIR: "%UserInformation.LRZ.scratch%"
+    ADD_PROJECT_TO_HOST: False
+    MAX_WALLCLOCK: '48:00'
+    TEMP_DIR: ''
+
+  LRZ_LOGIN:
+    TYPE: ps
+    HOST: "%UserInformation.LRZ.host%"
+    PROJECT: "%UserInformation.LRZ.project%"
+    USER: "%UserInformation.LRZ.user%"
+    SCRATCH_DIR: "%UserInformation.LRZ.scratch%"
+    ADD_PROJECT_TO_HOST: False
+    MAX_WALLCLOCK: '48:00'
+    TEMP_DIR: ''
+
diff --git a/conf/real-from-dwd-ana/jobs.yaml b/conf/real-from-dwd-ana/jobs.yaml
deleted file mode 100644
index ba4cfc84bd81dcfda2ed0849fe8fdc2a701d41bc..0000000000000000000000000000000000000000
--- a/conf/real-from-dwd-ana/jobs.yaml
+++ /dev/null
@@ -1,133 +0,0 @@
-# Example job with all options specified
-JOBS:
-  ## Job name
-  # JOBNAME:
-  ## Script to execute. If not specified, job will be omitted from workflow. "You can also specify additional files separated by a ",".
-  # Note: The post-processed additional_files will be sent to %HPCROOT%/LOG_%EXPID%
-  ## Path relative to the project directory
-  # FILE:
-  ## Platform to execute the job. If not specified, defaults to HPCARCH in expedf file.
-  ## LOCAL is always defined and refers to current machine
-  # PLATFORM:
-  ## Queue to add the job to. If not specified, uses PLATFORM default.
-  # QUEUE:
-  ## Defines dependencies from job as a list of parents jobs separated by spaces.
-  ## Dependencies to jobs in previous chunk, member o startdate, use -(DISTANCE)
-  # DEPENDENCIES:INI SIM-1 CLEAN-2
-  ## Define if jobs runs once, once per stardate, once per member or once per chunk. Options: once, date, member, chunk.
-  ## If not specified, defaults to once
-  # RUNNING:once
-  ## Specifies that job has only to be run after X dates, members or chunk. A job will always be created for the last
-  ## If not specified, defaults to 1
-  # FREQUENCY:3
-  ## On a job with FREQUENCY > 1, if True, the dependencies are evaluated against all
-  ## jobs in the frequency interval, otherwise only evaluate dependencies against current
-  ## iteration.
-  ## If not specified, defaults to True
-  # WAIT:False
-  ## Defines if job is only to be executed in reruns. If not specified, defaults to false.
-  # RERUN_ONLY:False
-  ## Wallclock to be submitted to the HPC queue in format HH:MM
-  # WALLCLOCK:00:05
-
-  ## Processors number to be submitted to the HPC. If not specified, defaults to 1.
-  ## Wallclock chunk increase (WALLCLOCK will be increased according to the formula WALLCLOCK + WCHUNKINC * (chunk - 1)).
-  ## Ideal for sequences of jobs that change their expected running time according to the current chunk.
-  # WCHUNKINC: 00:01
-  # PROCESSORS: 1
-  ## Threads number to be submitted to the HPC. If not specified, defaults to 1.
-  # THREADS: 1
-  ## Enables hyper-threading. If not specified, defaults to false.
-  # HYPERTHREADING: false
-  ## Tasks number to be submitted to the HPC. If not specified, defaults to 1.
-  # Tasks: 1
-  ## Memory requirements for the job in MB
-  # MEMORY: 4096
-  ##  Number of retrials if a job fails. If not specified, defaults to the value given on experiment's autosubmit.yml
-  # RETRIALS: 4
-  ##  Allows to put a delay between retries, of retrials if a job fails. If not specified, it will be static
-  # DELAY_RETRY_TIME: 11
-  # DELAY_RETRY_TIME: +11 # will wait 11,22,33,44...
-  # DELAY_RETRY_TIME: *11 # will wait 11,110,1110,11110...
-  ## Some jobs can not be checked before running previous jobs. Set this option to false if that is the case
-  # CHECK: False
-  ## Select the interpreter that will run the job. Options: bash, python, r Default: bash
-  # TYPE: bash
-  ## Specify the path to the interpreter. If empty, use system default based on job type  . Default: empty
-  # EXECUTABLE: /my_python_env/python3
-
-  TRANSFER_PROJECT:
-    FILE: templates/common/transfer_project.sh
-    PLATFORM: LOCAL
-
-  BUILD_ICON:
-    FILE: templates/common/build_icon.sh
-    DEPENDENCIES: TRANSFER_PROJECT
-    WALLCLOCK: 04:00
-    PROCESSORS: 16
-
-  BUILD_PYTHON_ENVIRONMENT:
-    FILE: templates/common/build_python_environment.sh
-    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
-    DEPENDENCIES: BUILD_ICON
-    WALLCLOCK: 01:00
-    PROCESSORS: 16
-
-  PREPARE_EXPERIMENT:
-    FILE: templates/real-from-dwd-ana/prepare_experiment.sh
-    DEPENDENCIES: BUILD_ICON
-    RUNNING: once
-    WALLCLOCK: 01:00
-
-  PREPARE_DATE:
-    FILE: templates/real-from-dwd-ana/prepare_date.sh
-    RUNNING: date
-    WALLCLOCK: 01:00
-
-  PREPARE_MEMBER:
-    FILE: templates/real-from-dwd-ana/prepare_member.sh
-    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE
-    RUNNING: member
-    WALLCLOCK: 01:00
-
-  PREPARE_CHUNK:
-    FILE: templates/real-from-dwd-ana/prepare_chunk.py
-    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
-    WALLCLOCK: 00:05
-    RUNNING: chunk
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-
-
-  RUN_ICON:
-    FILE: templates/common/run_icon.sh
-    DEPENDENCIES: PREPARE_CHUNK
-    WALLCLOCK: 04:00
-    RUNNING: chunk
-    PROCESSORS: 64
-    MEMORY: 81920
-    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive"]
-
-  COMPRESS:
-    FILE: templates/common/compress.py
-    DEPENDENCIES: RUN_ICON BUILD_PYTHON_ENVIRONMENT COMPRESS-1
-    RUNNING: chunk
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-    PROCESSORS: 16
-    MEMORY: 16384
-    WALLCLOCK: 01:00
-
-  TRANSFER:
-    FILE: templates/common/transfer.sh
-    DEPENDENCIES: COMPRESS
-    # Since this is running locally, can simply leave a long wallclock.
-    WALLCLOCK: 24:00
-    RUNNING: member
-    PLATFORM: LOCAL
-
-  CLEAN:
-    FILE: templates/common/clean.sh
-    DEPENDENCIES: TRANSFER
-    WALLCLOCK: 00:10
-    RUNNING: member
\ No newline at end of file
diff --git a/conf/real-from-dwd-ana/jobs.yml b/conf/real-from-dwd-ana/jobs.yml
new file mode 100644
index 0000000000000000000000000000000000000000..04e57d429e8dee534b1b7e7c51edc9c279a06378
--- /dev/null
+++ b/conf/real-from-dwd-ana/jobs.yml
@@ -0,0 +1,88 @@
+JOBS:
+  TRANSFER_PROJECT:
+    FILE: templates/common/transfer_project.sh
+    PLATFORM: LOCAL
+
+  BUILD_ICON:
+    FILE: templates/common/build_icon.sh
+    DEPENDENCIES: TRANSFER_PROJECT
+    WALLCLOCK: 04:00
+    PROCESSORS: 16
+    RETRIALS: 2 # retry because spack downloads sometimes timeout
+    NODES: 1
+
+  BUILD_PYTHON_ENVIRONMENT:
+    FILE: templates/common/build_python_environment.sh
+    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
+    DEPENDENCIES: BUILD_ICON
+    WALLCLOCK: 01:00
+    PROCESSORS: 16
+    NODES: 1
+
+  PREPARE_EXPERIMENT:
+    FILE: templates/real-from-dwd-ana/prepare_experiment.sh
+    DEPENDENCIES: BUILD_ICON
+    RUNNING: once
+    WALLCLOCK: 01:00
+
+  PREPARE_DATE_LOCAL:
+    FILE: templates/real-from-dwd-ana/prepare_date_local.sh
+    RUNNING: date
+    WALLCLOCK: 01:00
+    PLATFORM: LOCAL
+
+  PREPARE_DATE_REMOTE:
+    FILE: templates/real-from-dwd-ana/prepare_date_remote.sh
+    RUNNING: date
+    WALLCLOCK: 01:00
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  PREPARE_MEMBER:
+    FILE: templates/real-from-dwd-ana/prepare_member.sh
+    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE_REMOTE PREPARE_DATE_LOCAL
+    RUNNING: member
+    WALLCLOCK: 01:00
+
+  PREPARE_NAMELIST:
+    FILE: templates/real-from-dwd-ana/prepare_namelist.py
+    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
+    WALLCLOCK: 00:05
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+
+  RUN_ICON:
+    FILE: templates/common/run_icon.sh
+    DEPENDENCIES: PREPARE_NAMELIST
+    WALLCLOCK: 04:00
+    RUNNING: chunk
+    PROCESSORS: 64
+    MEMORY: 81920
+    CUSTOM_DIRECTIVES: [ "#SBATCH --exclusive" ]
+
+  COMPRESS:
+    FILE: templates/common/compress.py
+    DEPENDENCIES: RUN_ICON BUILD_PYTHON_ENVIRONMENT COMPRESS-1
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PROCESSORS: 16
+    MEMORY: 16384
+    WALLCLOCK: 01:00
+
+  TRANSFER:
+    FILE: templates/common/transfer.sh
+    DEPENDENCIES: COMPRESS
+    # Since this is running locally, can simply leave a long wallclock.
+    WALLCLOCK: 24:00
+    RUNNING: member
+    PLATFORM: LOCAL
+
+  CLEAN:
+    FILE: templates/common/clean.sh
+    DEPENDENCIES: TRANSFER
+    WALLCLOCK: 00:10
+    RUNNING: member
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
diff --git a/conf/real-from-dwd-ana/proj.yaml b/conf/real-from-dwd-ana/proj.yaml
deleted file mode 100644
index 15528b240fc483832be08a57802851338bf51dc9..0000000000000000000000000000000000000000
--- a/conf/real-from-dwd-ana/proj.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-spack:
-  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
-  branch: lmu/ubuntu20.04-icon
-  compiler: gcc@11.3.0
-
-icon:
-  version: 2.6.5-nwp0
-
-python_environment:
-  # Name of the virtual environment in the remote platform experiment folder
-  folder_name: python_environment
-  requirements:
-    # Because there's an issue with numba, for now we need to keep a specific version of numpy
-    - numpy==1.23
-    - enstools-compression
-    # Just to try a library from a git repository.
-    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
-    - f90nml
-
-simulation:
-  dynamics_grid_filename: icon_grid_0016_R02B06_G.nc
-  radiation_grid_filename: icon_grid_0015_R02B05_R.nc
-  external_parameters_filename: icon_extpar_0016_R02B06_G_20131206.nc
-  date_format: '%Y-%m-%dT%H:%M:%SZ'
-  namelist_paths:
-    # Path to the namelists
-    master: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_master.namelist"
-    atmosphere: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_atmosphere.namelist"
-
-  # List of output file names that will be copied (Wildcards * allowed)
-  output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
-  files_to_clean: "*.nc"
-
-data_management:
-  # Where do we put the output files afterwards?
-  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
-
-initial_conditions:
-  # Where are we getting our initial data from?
-  parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
-  member: 1
-
diff --git a/conf/real-from-dwd-ana/simulation.yml b/conf/real-from-dwd-ana/simulation.yml
new file mode 100644
index 0000000000000000000000000000000000000000..c3c274fe8299d0be6fd13ffb5a4bb65953597f23
--- /dev/null
+++ b/conf/real-from-dwd-ana/simulation.yml
@@ -0,0 +1,20 @@
+simulation:
+  dynamics_grid_filename: icon_grid_0016_R02B06_G.nc
+  radiation_grid_filename: icon_grid_0015_R02B05_R.nc
+  external_parameters_filename: icon_extpar_0016_R02B06_G_20131206.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the namelists
+    master: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_master.namelist"
+    atmosphere: "%HPCROOTDIR%/proj/namelists/real-from-dwd-ana/icon_atmosphere.namelist"
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "init_DOM01_ML_*.nc latbc_DOM01_ML_*.nc"
+  files_to_clean: "*.nc"
+
+  initial_conditions:
+    # Where are we getting our initial data from?
+    local: true
+    parent_folder: /archive/meteo/external-models/dwd/icon/oper/icon_oper_eps_gridded-global_rolling/
+    member: 1
+
diff --git a/conf/real-from-ideal/jobs.yaml b/conf/real-from-ideal/jobs.yaml
deleted file mode 100644
index 63c0f201659f711ae5e56e17fcc723532408cf94..0000000000000000000000000000000000000000
--- a/conf/real-from-ideal/jobs.yaml
+++ /dev/null
@@ -1,163 +0,0 @@
-# Example job with all options specified
-JOBS:
-  ## Job name
-  # JOBNAME:
-  ## Script to execute. If not specified, job will be omitted from workflow. "You can also specify additional files separated by a ",".
-  # Note: The post-processed additional_files will be sent to %HPCROOT%/LOG_%EXPID%
-  ## Path relative to the project directory
-  # FILE:
-  ## Platform to execute the job. If not specified, defaults to HPCARCH in expedf file.
-  ## LOCAL is always defined and refers to current machine
-  # PLATFORM:
-  ## Queue to add the job to. If not specified, uses PLATFORM default.
-  # QUEUE:
-  ## Defines dependencies from job as a list of parents jobs separated by spaces.
-  ## Dependencies to jobs in previous chunk, member o startdate, use -(DISTANCE)
-  # DEPENDENCIES:INI SIM-1 CLEAN-2
-  ## Define if jobs runs once, once per stardate, once per member or once per chunk. Options: once, date, member, chunk.
-  ## If not specified, defaults to once
-  # RUNNING:once
-  ## Specifies that job has only to be run after X dates, members or chunk. A job will always be created for the last
-  ## If not specified, defaults to 1
-  # FREQUENCY:3
-  ## On a job with FREQUENCY > 1, if True, the dependencies are evaluated against all
-  ## jobs in the frequency interval, otherwise only evaluate dependencies against current
-  ## iteration.
-  ## If not specified, defaults to True
-  # WAIT:False
-  ## Defines if job is only to be executed in reruns. If not specified, defaults to false.
-  # RERUN_ONLY:False
-  ## Wallclock to be submitted to the HPC queue in format HH:MM
-  # WALLCLOCK:00:05
-
-  ## Processors number to be submitted to the HPC. If not specified, defaults to 1.
-  ## Wallclock chunk increase (WALLCLOCK will be increased according to the formula WALLCLOCK + WCHUNKINC * (chunk - 1)).
-  ## Ideal for sequences of jobs that change their expected running time according to the current chunk.
-  # WCHUNKINC: 00:01
-  # PROCESSORS: 1
-  ## Threads number to be submitted to the HPC. If not specified, defaults to 1.
-  # THREADS: 1
-  ## Enables hyper-threading. If not specified, defaults to false.
-  # HYPERTHREADING: false
-  ## Tasks number to be submitted to the HPC. If not specified, defaults to 1.
-  # Tasks: 1
-  ## Memory requirements for the job in MB
-  # MEMORY: 4096
-  ##  Number of retrials if a job fails. If not specified, defaults to the value given on experiment's autosubmit.yml
-  # RETRIALS: 4
-  ##  Allows to put a delay between retries, of retrials if a job fails. If not specified, it will be static
-  # DELAY_RETRY_TIME: 11
-  # DELAY_RETRY_TIME: +11 # will wait 11,22,33,44...
-  # DELAY_RETRY_TIME: *11 # will wait 11,110,1110,11110...
-  ## Some jobs can not be checked before running previous jobs. Set this option to false if that is the case
-  # CHECK: False
-  ## Select the interpreter that will run the job. Options: bash, python, r Default: bash
-  # TYPE: bash
-  ## Specify the path to the interpreter. If empty, use system default based on job type  . Default: empty
-  # EXECUTABLE: /my_python_env/python3
-  TRANSFER_PROJECT:
-    FILE: templates/common/transfer_project.sh
-    PLATFORM: LOCAL
-
-  BUILD_ICON:
-    FILE: templates/common/build_icon.sh
-    DEPENDENCIES: TRANSFER_PROJECT
-    WALLCLOCK: 01:00
-    PROCESSORS: 16
-
-  BUILD_PYTHON_ENVIRONMENT:
-    FILE: templates/common/build_python_environment.sh
-    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
-    DEPENDENCIES: BUILD_ICON
-    WALLCLOCK: 01:00
-    PROCESSORS: 16
-
-  PREPARE_EXPERIMENT:
-    FILE: templates/real-from-ideal/prepare_experiment.sh
-    DEPENDENCIES: BUILD_ICON
-    RUNNING: once
-    WALLCLOCK: 00:10
-    PLATFORM: LOGIN
-
-  PREPARE_IDEAL_DIRECTORY:
-    FILE: templates/real-from-ideal/prepare_ideal_directory.sh
-    DEPENDENCIES: PREPARE_EXPERIMENT
-    RUNNING: date
-    WALLCLOCK: 00:10
-    PLATFORM: LOGIN
-
-  PREPARE_IDEAL_NAMELIST:
-    FILE: templates/real-from-ideal/prepare_ideal_namelist.py
-    DEPENDENCIES: PREPARE_IDEAL_DIRECTORY BUILD_PYTHON_ENVIRONMENT TRANSFER_PROJECT
-    RUNNING: date
-    WALLCLOCK: 00:10
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-    PLATFORM: LOGIN
-
-  RUN_IDEAL:
-    FILE: templates/real-from-ideal/run_ideal.sh
-    DEPENDENCIES: PREPARE_IDEAL_NAMELIST
-    RUNNING: date
-    WALLCLOCK: 01:00
-
-  EXTPAR_FROM_IDEALIZED:
-    FILE: templates/real-from-ideal/extpar_from_idealized.py
-    DEPENDENCIES: RUN_IDEAL
-    RUNNING: date
-    WALLCLOCK: 01:00
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-
-  FG_ANA_FROM_IDEALIZED:
-    FILE: templates/real-from-ideal/fg_ana_from_idealized.py
-    DEPENDENCIES: RUN_IDEAL
-    RUNNING: date
-    WALLCLOCK: 01:00
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-
-
-  PREPARE_MEMBER:
-    FILE: templates/real-from-ideal/prepare_member.sh
-    DEPENDENCIES: FG_ANA_FROM_IDEALIZED EXTPAR_FROM_IDEALIZED
-    RUNNING: member
-    WALLCLOCK: 01:00
-    PLATFORM: LOGIN
-
-  PREPARE_CHUNK:
-    FILE: templates/real-from-ideal/prepare_chunk.py
-    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
-    WALLCLOCK: 00:05
-    RUNNING: chunk
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-    PLATFORM: LOGIN
-
-  RUN_ICON:
-    FILE: templates/common/run_icon.sh
-    DEPENDENCIES: PREPARE_CHUNK
-    WALLCLOCK: 01:00
-    RUNNING: chunk
-    PROCESSORS: 16
-
-  COMPRESS:
-    FILE: templates/common/compress.py
-    DEPENDENCIES: RUN_ICON COMPRESS-1 BUILD_PYTHON_ENVIRONMENT
-    RUNNING: chunk
-    TYPE: python
-    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
-
-  TRANSFER:
-    FILE: templates/common/transfer.sh
-    DEPENDENCIES: COMPRESS
-    WALLCLOCK: 00:10
-    RUNNING: member
-    PLATFORM: LOCAL
-
-  CLEAN:
-    FILE: templates/common/clean.sh
-    DEPENDENCIES: TRANSFER
-    WALLCLOCK: 00:10
-    RUNNING: member
-    PLATFORM: LOGIN
diff --git a/conf/real-from-ideal/jobs.yml b/conf/real-from-ideal/jobs.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a098d920334182e985989dfe0b8d87466e36293b
--- /dev/null
+++ b/conf/real-from-ideal/jobs.yml
@@ -0,0 +1,111 @@
+JOBS:
+  TRANSFER_PROJECT:
+    FILE: templates/common/transfer_project.sh
+    PLATFORM: LOCAL
+
+  BUILD_ICON:
+    FILE: templates/common/build_icon.sh
+    DEPENDENCIES: TRANSFER_PROJECT
+    WALLCLOCK: 04:00
+    PROCESSORS: 16
+    RETRIALS: 2 # retry because spack downloads sometimes timeout
+    NODES: 1
+
+  BUILD_PYTHON_ENVIRONMENT:
+    FILE: templates/common/build_python_environment.sh
+    # Right now we rely on spack for building icon and having a python interpreter, so we need this dependency:
+    DEPENDENCIES: BUILD_ICON
+    WALLCLOCK: 01:00
+    PROCESSORS: 16
+    NODES: 1
+
+  PREPARE_EXPERIMENT:
+    FILE: templates/real-from-ideal/prepare_experiment.sh
+    DEPENDENCIES: BUILD_ICON
+    RUNNING: once
+    WALLCLOCK: 00:10
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  PREPARE_IDEAL_DIRECTORY:
+    FILE: templates/real-from-ideal/prepare_ideal_directory.sh
+    DEPENDENCIES: PREPARE_EXPERIMENT
+    RUNNING: date
+    WALLCLOCK: 00:10
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  PREPARE_IDEAL_NAMELIST:
+    FILE: templates/real-from-ideal/prepare_ideal_namelist.py
+    DEPENDENCIES: PREPARE_IDEAL_DIRECTORY BUILD_PYTHON_ENVIRONMENT TRANSFER_PROJECT
+    RUNNING: date
+    WALLCLOCK: 00:10
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  RUN_IDEAL:
+    FILE: templates/real-from-ideal/run_ideal.sh
+    DEPENDENCIES: PREPARE_IDEAL_NAMELIST
+    RUNNING: date
+    WALLCLOCK: 01:00
+
+  EXTPAR_FROM_IDEALIZED:
+    FILE: templates/real-from-ideal/extpar_from_idealized.py
+    DEPENDENCIES: RUN_IDEAL
+    RUNNING: date
+    WALLCLOCK: 01:00
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  FG_ANA_FROM_IDEALIZED:
+    FILE: templates/real-from-ideal/fg_ana_from_idealized.py
+    DEPENDENCIES: RUN_IDEAL
+    RUNNING: date
+    WALLCLOCK: 01:00
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+
+  PREPARE_MEMBER:
+    FILE: templates/real-from-ideal/prepare_member.sh
+    DEPENDENCIES: FG_ANA_FROM_IDEALIZED EXTPAR_FROM_IDEALIZED
+    RUNNING: member
+    WALLCLOCK: 01:00
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  PREPARE_NAMELIST:
+    FILE: templates/real-from-ideal/prepare_namelist.py
+    DEPENDENCIES: TRANSFER_PROJECT BUILD_PYTHON_ENVIRONMENT PREPARE_MEMBER RUN_ICON-1
+    WALLCLOCK: 00:05
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
+
+  RUN_ICON:
+    FILE: templates/common/run_icon.sh
+    DEPENDENCIES: PREPARE_NAMELIST
+    WALLCLOCK: 01:00
+    RUNNING: chunk
+    PROCESSORS: 16
+    NODES: 1
+
+  COMPRESS:
+    FILE: templates/common/compress.py
+    DEPENDENCIES: RUN_ICON COMPRESS-1 BUILD_PYTHON_ENVIRONMENT
+    RUNNING: chunk
+    TYPE: python
+    EXECUTABLE: "%HPCROOTDIR%/%python_environment.folder_name%/bin/python3"
+
+  TRANSFER:
+    FILE: templates/common/transfer.sh
+    DEPENDENCIES: COMPRESS
+    WALLCLOCK: 00:10
+    RUNNING: member
+    PLATFORM: LOCAL
+
+  CLEAN:
+    FILE: templates/common/clean.sh
+    DEPENDENCIES: TRANSFER
+    WALLCLOCK: 00:10
+    RUNNING: member
+    PLATFORM: "%DEFAULT.HPCARCH%_LOGIN"
diff --git a/conf/real-from-ideal/proj.yaml b/conf/real-from-ideal/proj.yaml
deleted file mode 100644
index 5585b798f1cefefa62a105e20e7f0936328d212c..0000000000000000000000000000000000000000
--- a/conf/real-from-ideal/proj.yaml
+++ /dev/null
@@ -1,39 +0,0 @@
-spack:
-  url: git@gitlab.physik.uni-muenchen.de:LDAP_rbg/spack.git
-  branch: lmu/ubuntu20.04-icon
-  compiler: gcc@11.3.0
-
-icon:
-  version: 2.6.5-nwp0
-
-python_environment:
-  # Name of the virtual environment in the remote platform experiment folder
-  folder_name: python_environment
-  requirements:
-    # Because there's an issue with numba, for now we need to keep a specific version of numpy
-    - numpy==1.23
-    - enstools-compression
-    # Just to try a library from a git repository.
-    - git+https://gitlab.physik.uni-muenchen.de/Oriol.Tinto/otils.git
-    - f90nml
-
-simulation:
-  dynamics_grid_filename: icon_grid_0012_R02B04_G.nc
-  radiation_grid_filename: icon_grid_0011_R02B03_R.nc
-  external_parameters_filename: extpar_DOM01.nc
-  date_format: '%Y-%m-%dT%H:%M:%SZ'
-  namelist_paths:
-    # Path to the name lists
-    master: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_master.namelist"
-    atmosphere:
-      ideal: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_ideal.namelist"
-      real: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_real.namelist"
-
-
-  # List of output file names that will be copied (Wildcards * allowed)
-  output_file_names: "latbc_DOM01_ML_*.nc"
-  files_to_clean: "*.nc"
-
-data_management:
-  # Where do we put the output files afterwards?
-  local_destination_folder: /scratch/o/Oriol.Tinto/tmp/
diff --git a/conf/real-from-ideal/simulation.yml b/conf/real-from-ideal/simulation.yml
new file mode 100644
index 0000000000000000000000000000000000000000..ebafd90495920a7c402a28b4be70f78750129df3
--- /dev/null
+++ b/conf/real-from-ideal/simulation.yml
@@ -0,0 +1,16 @@
+simulation:
+  dynamics_grid_filename: icon_grid_0012_R02B04_G.nc
+  radiation_grid_filename: icon_grid_0011_R02B03_R.nc
+  external_parameters_filename: extpar_DOM01.nc
+  date_format: '%Y-%m-%dT%H:%M:%SZ'
+  namelist_paths:
+    # Path to the name lists
+    master: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_master.namelist"
+    atmosphere:
+      ideal: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_ideal.namelist"
+      real: "%HPCROOTDIR%/proj/namelists/real-from-ideal/icon_atmosphere_real.namelist"
+
+
+  # List of output file names that will be copied (Wildcards * allowed)
+  output_file_names: "latbc_DOM01_ML_*.nc"
+  files_to_clean: "*.nc"
\ No newline at end of file
diff --git a/platforms/common/spack_utils.sh b/platforms/common/spack_utils.sh
index 0650e618d850d432bd6f49962c5880d9807fb741..02ec602587a49823bce10091fbd4933741a37c02 100644
--- a/platforms/common/spack_utils.sh
+++ b/platforms/common/spack_utils.sh
@@ -1,27 +1,74 @@
-function spack_env() {
-export SPACK_SETUP_ENV=spack/share/spack/setup-env.sh
-export SPACK_VENV=spack_icon_env
-export SPACK_USER_CACHE_PATH=${WORKDIR}/SPACK_USER_CACHE_PATH
-export SPACK_DISABLE_LOCAL_CONFIG=true
-}
+function _install_spack() {
+  if [ ! -e ${SPACK_ROOT} ]; then
+    echo "Cloning to ${SPACK_ROOT}"
+    git clone --depth 1 ${SPACK_URL} -b ${SPACK_BRANCH} ${SPACK_ROOT}
+  fi
+
+  _init_spack "$SPACK_INIT_CMD" "$SPACK_ROOT"
 
-function install_spack() {
-spack_env
+  if [ ! -z "${SPACK_EXTERNALS}" ] ; then
+    for ext in ${SPACK_EXTERNALS}; do
+      spack external find $ext
+    done
+  fi
 
-#TODO: Would be good to enable the re-utilization of existing spack packages (via packages.yaml or upstreams.yaml)
-if [ ! -f ${SPACK_SETUP_ENV} ]; then
-  git clone ${SPACK_URL} -b ${SPACK_BRANCH}
-fi
+  if [[ $(spack compiler info ${SPACK_COMPILER}) ]]; then
+    echo "Found Compiler $(spack compiler info ${SPACK_COMPILER})"
+  else
+    echo "could not find compiler, will now try to install it... this may take a while"
+    spack install --reuse ${SPACK_COMPILER}
+    spack compiler add $(spack location --install-dir $SPACK_COMPILER)
+  fi
+}
 
-. ${SPACK_SETUP_ENV}
-spack env create $SPACK_VENV
-spack env activate -p $SPACK_VENV
-spack compiler find
+function _init_spack() {
+  SPACK_INIT_CMD=$1
+  SPACK_ROOT=$2
+  if [ -z "$SPACK_INIT_CMD" ] && [ ! -z "${SPACK_ROOT}" ]; then
+    echo "Empty SPACK_INIT_CMD -> trying to source config file of spack root: $SPACK_ROOT/share/spack/setup-env.sh"
+    . $SPACK_ROOT/share/spack/setup-env.sh
+  else
+    echo "Executing SPACK_INIT_CMD: $SPACK_INIT_CMD"
+    $SPACK_INIT_CMD
+  fi
 }
 
 function load_spack() {
-spack_env
-if [ ! -f ${SPACK_SETUP_ENV} ]; then install_spack; fi
-. ${SPACK_SETUP_ENV}
-spack env activate -p $SPACK_VENV
+  export SPACK_INIT_CMD=$1
+  export SPACK_ROOT=$2                 # i.e.: spack
+  export SPACK_URL=$3                  # i.e.: https://github.com/spack/spack.git
+  export SPACK_BRANCH=$4               # i.e.: develop
+  export SPACK_EXTERNALS=$5            # i.e.: slurm
+  export SPACK_COMPILER=$6             # i.e.: gcc@12.2.0
+  export SPACK_DISABLE_LOCAL_CONFIG=$7 # i.e.: true
+  export SPACK_USER_CACHE_PATH=$8      # i.e.: ${SPACK_ROOT}/spack_user_cache_path
+  export SPACK_USER_CONFIG_PATH=$9     # i.e.: ${SPACK_ROOT}/spack_user_config_path
+
+  if [ "$SPACK_DISABLE_LOCAL_CONFIG" != "True" ]; then
+    unset SPACK_DISABLE_LOCAL_CONFIG
+  fi
+
+  echo "SPACK_INIT_CMD                = $SPACK_INIT_CMD"
+  echo "SPACK_ROOT                    = $SPACK_ROOT"
+  echo "SPACK_URL                     = $SPACK_URL"
+  echo "SPACK_BRANCH                  = $SPACK_BRANCH"
+  echo "SPACK_EXTERNALS               = $SPACK_EXTERNALS"
+  echo "SPACK_COMPILER                = $SPACK_COMPILER"
+  echo "SPACK_DISABLE_LOCAL_CONFIG    = ${SPACK_DISABLE_LOCAL_CONFIG:-False}"
+  echo "SPACK_USER_CACHE_PATH         = $SPACK_USER_CACHE_PATH"
+  echo "SPACK_USER_CONFIG_PATH        = $SPACK_USER_CONFIG_PATH"
+
+  if [ -z "$SPACK_USER_CACHE_PATH" ]; then
+    unset SPACK_USER_CACHE_PATH
+  fi
+
+  if [ ! -z "${SPACK_ROOT}" ] ; then _install_spack; fi
+
+  if [ -z "$SPACK_USER_CONFIG_PATH" ]; then
+    unset SPACK_USER_CONFIG_PATH
+  fi
+
+  _init_spack "$SPACK_INIT_CMD" "$SPACK_ROOT"
+
+  echo "Using spack from $(which spack)"
 }
diff --git a/spack_repo/packages/dwd-icon-tools/package.py b/spack_repo/packages/dwd-icon-tools/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..fb032c8b65c9258e325b8b9fbfdbf9c5d71acf13
--- /dev/null
+++ b/spack_repo/packages/dwd-icon-tools/package.py
@@ -0,0 +1,88 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# ----------------------------------------------------------------------------
+# If you submit this package back to Spack as a pull request,
+# please first remove this boilerplate and all FIXME comments.
+#
+# This is a template package file for Spack.  We've put "FIXME"
+# next to all the things you'll want to change. Once you've handled
+# them, you can save this file and test your package like this:
+#
+#     spack install dwd-icon-tools
+#
+# You can edit this file again by typing:
+#
+#     spack edit dwd-icon-tools
+#
+# See the Spack documentation for more information on packaging.
+# ----------------------------------------------------------------------------
+
+from spack.package import *
+from pathlib import Path
+
+
+class DwdIconTools(Package):
+    """DWD Icon Tools"""
+
+    homepage = "https://www.example.com"
+    # maintainers("oriol.tinto")
+    git = "ssh://git@gitlab.lrz.de/dkrz-mirror/dwd_icon_tools.git"
+
+    version("2.5.2", branch="icontools-2.5.2")
+
+    depends_on("netcdf-c")
+    depends_on("eccodes")
+
+    def patch(self):
+        """
+        Because of the lack of access rights to the original submodule repositories,
+        we patch the gitmodules file to point to a different mirror.
+        """
+        git_submodules_file = Path().cwd() / ".gitmodules"
+        git_mirror = "git@gitlab.lrz.de:dkrz-mirror"
+        git_modules_patch = f"""
+        [submodule "externals/libcdi"]
+        path = externals/libcdi
+        url = {git_mirror}/libcdi.git
+        """
+
+        # Replace the content of the original file with the patch
+        with git_submodules_file.open("w") as out_f:
+            out_f.write(git_modules_patch)
+
+        # Run git submodule update
+        git = which("git")
+        git("submodule", "update", "--init", "--recursive")
+
+
+    def setup_build_environment(self, env):
+        spec = self.spec
+        # Some environment variables to set
+        env_variables_to_set = {
+#           "CC": spec["mpi"].mpicc,
+#           "FC": spec["mpi"].mpifc,
+#           "F77": spec["mpi"].mpif77,
+            "CXXFLAGS": "-O2 -g -fopenmp -Wunused -DNOMPI",
+            "FCFLAGS": "-I/usr/include --std=f2008 -O2 -g -cpp -fopenmp -fbounds-check -Wunused -DNOMPI",
+            "LIBS": "-leccodes -lgfortran -lhdf5 -lxml2",
+        }
+        for variable, value in env_variables_to_set.items():
+            env.set(variable, value)
+
+
+
+    def install(self, spec, prefix):
+        options = [
+            f"--prefix={prefix}",
+            "--enable-grib2",
+            "--enable-iso-c-interface",
+            f"--with-netcdf={spec['netcdf-c'].prefix}"
+        ]
+
+        configure(*options)
+        make()
+        make("install")
+
diff --git a/spack_repo/packages/gettext/nvhpc-builtin.patch b/spack_repo/packages/gettext/nvhpc-builtin.patch
new file mode 100644
index 0000000000000000000000000000000000000000..8edb2fc5d425c927555d2f716f1947c0a880d391
--- /dev/null
+++ b/spack_repo/packages/gettext/nvhpc-builtin.patch
@@ -0,0 +1,166 @@
+--- a/gettext-runtime/gnulib-lib/xalloc-oversized.h	2020-08-21 07:51:29.459375578 -0700
++++ b/gettext-runtime/gnulib-lib/xalloc-oversized.h	2020-08-21 07:53:18.571795663 -0700
+@@ -41,10 +41,10 @@
+    positive and N must be nonnegative.  This is a macro, not a
+    function, so that it works correctly even when SIZE_MAX < N.  */
+ 
+-#if 7 <= __GNUC__
++#if 7 <= __GNUC__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    __builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
+-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
++#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    (__builtin_constant_p (n) && __builtin_constant_p (s) \
+     ? __xalloc_oversized (n, s) \
+--- a/gettext-runtime/gnulib-lib/intprops.h	2020-08-21 07:51:20.668341900 -0700
++++ b/gettext-runtime/gnulib-lib/intprops.h	2020-08-21 07:52:43.906661856 -0700
+@@ -222,7 +222,7 @@
+ 
+ /* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
+    (A, B, P) work when P is non-null.  */
+-#if 5 <= __GNUC__ && !defined __ICC
++#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
+-#elif defined __has_builtin
++#elif defined __has_builtin && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
+@@ -240,7 +240,7 @@
+ 
+ /* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
+    __builtin_mul_overflow_p and __builtin_mul_overflow_p.  */
+-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
++#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
+ 
+ /* The _GL*_OVERFLOW macros have the same restrictions as the
+    *_RANGE_OVERFLOW macros, except that they do not assume that operands
+--- a/gettext-tools/gnulib-lib/xalloc-oversized.h	2020-08-21 10:19:23.875281647 -0700
++++ b/gettext-tools/gnulib-lib/xalloc-oversized.h	2020-08-21 10:20:40.650583499 -0700
+@@ -41,10 +41,10 @@
+    positive and N must be nonnegative.  This is a macro, not a
+    function, so that it works correctly even when SIZE_MAX < N.  */
+ 
+-#if 7 <= __GNUC__
++#if 7 <= __GNUC__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    __builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
+-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
++#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    (__builtin_constant_p (n) && __builtin_constant_p (s) \
+     ? __xalloc_oversized (n, s) \
+--- a/gettext-tools/gnulib-lib/intprops.h	2020-08-21 10:18:38.650103825 -0700
++++ b/gettext-tools/gnulib-lib/intprops.h	2020-08-21 10:19:12.379236445 -0700
+@@ -222,7 +222,7 @@
+ 
+ /* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
+    (A, B, P) work when P is non-null.  */
+-#if 5 <= __GNUC__ && !defined __ICC
++#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
+-#elif defined __has_builtin
++#elif defined __has_builtin && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
+@@ -240,7 +240,7 @@
+ 
+ /* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
+    __builtin_mul_overflow_p and __builtin_mul_overflow_p.  */
+-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
++#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
+ 
+ /* The _GL*_OVERFLOW macros have the same restrictions as the
+    *_RANGE_OVERFLOW macros, except that they do not assume that operands
+--- a/gettext-tools/libgrep/intprops.h	2020-08-21 10:31:00.726022663 -0700
++++ b/gettext-tools/libgrep/intprops.h	2020-08-21 10:31:29.946137693 -0700
+@@ -222,7 +222,7 @@
+ 
+ /* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
+    (A, B, P) work when P is non-null.  */
+-#if 5 <= __GNUC__ && !defined __ICC
++#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
+-#elif defined __has_builtin
++#elif defined __has_builtin && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
+@@ -240,7 +240,7 @@
+ 
+ /* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
+    __builtin_mul_overflow_p and __builtin_mul_overflow_p.  */
+-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
++#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
+ 
+ /* The _GL*_OVERFLOW macros have the same restrictions as the
+    *_RANGE_OVERFLOW macros, except that they do not assume that operands
+--- a/gettext-tools/libgettextpo/xalloc-oversized.h	2020-08-21 11:19:50.065564273 -0700
++++ b/gettext-tools/libgettextpo/xalloc-oversized.h	2020-08-21 11:21:14.732898185 -0700
+@@ -41,10 +41,10 @@
+    positive and N must be nonnegative.  This is a macro, not a
+    function, so that it works correctly even when SIZE_MAX < N.  */
+ 
+-#if 7 <= __GNUC__
++#if 7 <= __GNUC__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    __builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
+-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
++#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    (__builtin_constant_p (n) && __builtin_constant_p (s) \
+     ? __xalloc_oversized (n, s) \
+--- a/gettext-tools/libgettextpo/intprops.h	2020-08-21 11:19:58.703598336 -0700
++++ b/gettext-tools/libgettextpo/intprops.h	2020-08-21 11:20:37.612751786 -0700
+@@ -222,7 +222,7 @@
+ 
+ /* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
+    (A, B, P) work when P is non-null.  */
+-#if 5 <= __GNUC__ && !defined __ICC
++#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
+-#elif defined __has_builtin
++#elif defined __has_builtin && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
+@@ -240,7 +240,7 @@
+ 
+ /* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
+    __builtin_mul_overflow_p and __builtin_mul_overflow_p.  */
+-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
++#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
+ 
+ /* The _GL*_OVERFLOW macros have the same restrictions as the
+    *_RANGE_OVERFLOW macros, except that they do not assume that operands
+--- a/libtextstyle/lib/xalloc-oversized.h	2020-08-21 11:30:13.488022919 -0700
++++ b/libtextstyle/lib/xalloc-oversized.h	2020-08-21 11:31:26.561311097 -0700
+@@ -41,10 +41,10 @@
+    positive and N must be nonnegative.  This is a macro, not a
+    function, so that it works correctly even when SIZE_MAX < N.  */
+ 
+-#if 7 <= __GNUC__
++#if 7 <= __GNUC__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    __builtin_mul_overflow_p (n, s, (__xalloc_count_type) 1)
+-#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__
++#elif 5 <= __GNUC__ && !defined __ICC && !__STRICT_ANSI__ && !defined __NVCOMPILER
+ # define xalloc_oversized(n, s) \
+    (__builtin_constant_p (n) && __builtin_constant_p (s) \
+     ? __xalloc_oversized (n, s) \
+
+--- a/libtextstyle/lib/intprops.h	2020-08-21 11:30:24.283065492 -0700
++++ b/libtextstyle/lib/intprops.h	2020-08-21 11:30:54.415184325 -0700
+@@ -222,7 +222,7 @@
+ 
+ /* True if __builtin_add_overflow (A, B, P) and __builtin_sub_overflow
+    (A, B, P) work when P is non-null.  */
+-#if 5 <= __GNUC__ && !defined __ICC
++#if 5 <= __GNUC__ && !defined __ICC && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW 1
+-#elif defined __has_builtin
++#elif defined __has_builtin && !defined __NVCOMPILER
+ # define _GL_HAS_BUILTIN_ADD_OVERFLOW __has_builtin (__builtin_add_overflow)
+@@ -240,7 +240,7 @@
+ 
+ /* True if __builtin_add_overflow_p (A, B, C) works, and similarly for
+    __builtin_mul_overflow_p and __builtin_mul_overflow_p.  */
+-#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__)
++#define _GL_HAS_BUILTIN_OVERFLOW_P (7 <= __GNUC__ && !defined __NVCOMPILER)
+ 
+ /* The _GL*_OVERFLOW macros have the same restrictions as the
+    *_RANGE_OVERFLOW macros, except that they do not assume that operands
diff --git a/spack_repo/packages/gettext/nvhpc-export-symbols.patch b/spack_repo/packages/gettext/nvhpc-export-symbols.patch
new file mode 100644
index 0000000000000000000000000000000000000000..2a26037e311d9693ba02c10c26811ea54b36661a
--- /dev/null
+++ b/spack_repo/packages/gettext/nvhpc-export-symbols.patch
@@ -0,0 +1,31 @@
+--- a/gettext-runtime/intl/Makefile.in	2020-08-21 08:39:59.102729081 -0700
++++ b/gettext-runtime/intl/Makefile.in	2020-08-21 08:40:07.425761760 -0700
+@@ -1471,7 +1471,6 @@
+ OTHER_LDFLAGS = \
+   @LTLIBICONV@ @INTL_MACOSX_LIBS@ $(INTL_WINDOWS_LIBS) @LTLIBTHREAD@ \
+   -no-undefined \
+-  -export-symbols-regex '^([^g]|g[^l]|gl[^w]|glw[^t]|glwt[^h]|glwth[^r]|glwthr[^e]|glwthre[^a]|glwthrea[^d]).*' \
+   -version-info $(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) \
+   -rpath $(libdir)
+
+--- a/gettext-tools/intl/Makefile.in	2020-08-21 07:57:18.357721212 -0700
++++ b/gettext-tools/intl/Makefile.in	2020-08-21 07:57:29.051762490 -0700
+@@ -2296,7 +2296,6 @@
+ OTHER_LDFLAGS = \
+   @LTLIBICONV@ @INTL_MACOSX_LIBS@ $(INTL_WINDOWS_LIBS) @LTLIBTHREAD@ \
+   -no-undefined \
+-  -export-symbols-regex '^([^g]|g[^l]|gl[^w]|glw[^t]|glwt[^h]|glwth[^r]|glwthr[^e]|glwthre[^a]|glwthrea[^d]).*' \
+   -version-info $(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) \
+   -rpath $(libdir)
+ 
+--- a/libtextstyle/lib/Makefile.in	2020-08-21 08:49:08.277982271 -0700
++++ b/libtextstyle/lib/Makefile.in	2020-08-21 08:49:19.675030561 -0700
+@@ -1917,7 +1917,7 @@
+ libtextstyle_la_LDFLAGS = $(AM_LDFLAGS) -no-undefined $(FABS_LIBM) \
+ 	$(ISNAND_LIBM) $(ISNANF_LIBM) $(ISNANL_LIBM) $(LOG10_LIBM) \
+ 	$(LTLIBICONV) $(LTLIBINTL) $(POW_LIBM) $(am__append_8) \
+-	-no-undefined -export-symbols libtextstyle.sym -version-info \
++	-no-undefined -version-info \
+ 	$(LTV_CURRENT):$(LTV_REVISION):$(LTV_AGE) -rpath $(libdir)
+ 
+ # Use this preprocessor expression to decide whether #include_next works.
diff --git a/spack_repo/packages/gettext/nvhpc-long-width.patch b/spack_repo/packages/gettext/nvhpc-long-width.patch
new file mode 100644
index 0000000000000000000000000000000000000000..ca4c8e58b7603b9bfdcac9d7d4b4ce8b0cbce6c5
--- /dev/null
+++ b/spack_repo/packages/gettext/nvhpc-long-width.patch
@@ -0,0 +1,17 @@
+--- a/gettext-tools/libgrep/regex_internal.h	2020-08-21 09:14:20.039942370 -0700
++++ b/gettext-tools/libgrep/regex_internal.h	2020-08-21 10:06:57.840331452 -0700
+@@ -35,6 +35,14 @@
+ #include <intprops.h>
+ #include <verify.h>
+ 
++#ifndef __LONG_WIDTH__
++#if LONG_WIDTH
++#define __LONG_WIDTH__ LONG_WIDTH
++#else
++#define __LONG_WIDTH__ __WORDSIZE
++#endif
++#endif
++
+ #if defined DEBUG && DEBUG != 0
+ # include <assert.h>
+ # define DEBUG_ASSERT(x) assert (x)
diff --git a/spack_repo/packages/gettext/package.py b/spack_repo/packages/gettext/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..bafeb6e6d5c0f35b8983db76282529b57afb21f1
--- /dev/null
+++ b/spack_repo/packages/gettext/package.py
@@ -0,0 +1,121 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import re
+
+from spack.package import *
+
+
+class Gettext(AutotoolsPackage, GNUMirrorPackage):
+    """GNU internationalization (i18n) and localization (l10n) library."""
+
+    homepage = "https://www.gnu.org/software/gettext/"
+    gnu_mirror_path = "gettext/gettext-0.20.1.tar.xz"
+
+    maintainers("michaelkuhn")
+
+    executables = [r"^gettext$"]
+
+    # buggy install when calling msgfmt version("0.21.1", sha256="50dbc8f39797950aa2c98e939947c527e5ac9ebd2c1b99dd7b06ba33a6767ae6")
+    # buggy install when calling msgfmt version("0.21", sha256="d20fcbb537e02dcf1383197ba05bd0734ef7bf5db06bdb241eb69b7d16b73192")
+    version("0.20.2", sha256="b22b818e644c37f6e3d1643a1943c32c3a9bff726d601e53047d2682019ceaba")
+    version("0.20.1", sha256="53f02fbbec9e798b0faaf7c73272f83608e835c6288dd58be6c9bb54624a3800")
+    version("0.19.8.1", sha256="105556dbc5c3fbbc2aa0edb46d22d055748b6f5c7cd7a8d99f8e7eb84e938be4")
+    version("0.19.7", sha256="378fa86a091cec3acdece3c961bb8d8c0689906287809a8daa79dc0c6398d934")
+
+    # Recommended variants
+    variant("curses", default=True, description="Use libncurses")
+    variant("libxml2", default=True, description="Use libxml2")
+    variant("git", default=True, description="Enable git support")
+    variant("tar", default=True, description="Enable tar support")
+    variant("bzip2", default=True, description="Enable bzip2 support")
+    variant("xz", default=True, description="Enable xz support")
+
+    # Optional variants
+    variant("libunistring", default=False, description="Use libunistring")
+
+    depends_on("iconv")
+    # Recommended dependencies
+    depends_on("ncurses", when="+curses")
+    depends_on("libxml2", when="+libxml2")
+    # Java runtime and compiler (e.g. GNU gcj or kaffe)
+    # C# runtime and compiler (e.g. pnet or mono)
+    depends_on("tar", when="+tar")
+    # depends_on('gzip',     when='+gzip')
+    depends_on("bzip2", when="+bzip2")
+    depends_on("xz", when="+xz", type=("build", "link", "run"))
+
+    # Optional dependencies
+    # depends_on('glib')  # circular dependency?
+    # depends_on('libcroco@0.6.1:')
+    depends_on("libunistring", when="+libunistring")
+    # depends_on('cvs')
+
+    patch("test-verify-parallel-make-check.patch", when="@:0.19.8.1")
+    patch("nvhpc-builtin.patch", when="@:0.21.0 %nvhpc")
+    patch("nvhpc-export-symbols.patch", when="%nvhpc")
+    patch("nvhpc-long-width.patch", when="%nvhpc")
+
+    # Apply this only where we know that the system libc is glibc, be very careful:
+    @when("@:0.21.0 target=ppc64le:")
+    def patch(self):
+        for fn in ("gettext-tools/gnulib-lib/cdefs.h", "gettext-tools/libgrep/cdefs.h"):
+            with open(fn, "w") as f:
+                f.write("#include <sys/cdefs.h>\n")
+
+    @classmethod
+    def determine_version(cls, exe):
+        gettext = Executable(exe)
+        output = gettext("--version", output=str, error=str)
+        match = re.match(r"gettext(?: \(.+\)) ([\d.]+)", output)
+        return match.group(1) if match else None
+
+    def configure_args(self):
+        spec = self.spec
+
+        config_args = [
+            "--disable-java",
+            "--disable-csharp",
+            "--with-libiconv-prefix={0}".format(spec["iconv"].prefix),
+            "--with-included-glib",
+            "--with-included-gettext",
+            "--with-included-libcroco",
+            "--without-emacs",
+            "--with-lispdir=%s/emacs/site-lisp/gettext" % self.prefix.share,
+            "--without-cvs",
+        ]
+
+        if "+curses" in spec:
+            config_args.append("--with-ncurses-prefix={0}".format(spec["ncurses"].prefix))
+        else:
+            config_args.append("--disable-curses")
+
+        if "+libxml2" in spec:
+            config_args.append("--with-libxml2-prefix={0}".format(spec["libxml2"].prefix))
+        else:
+            config_args.append("--with-included-libxml")
+
+        if "+bzip2" not in spec:
+            config_args.append("--without-bzip2")
+
+        if "+xz" not in spec:
+            config_args.append("--without-xz")
+
+        if "+libunistring" in spec:
+            config_args.append(
+                "--with-libunistring-prefix={0}".format(spec["libunistring"].prefix)
+            )
+        else:
+            config_args.append("--with-included-libunistring")
+
+        return config_args
+
+    @property
+    def libs(self):
+        return find_libraries(
+            ["libasprintf", "libgettextlib", "libgettextpo", "libgettextsrc", "libintl"],
+            root=self.prefix,
+            recursive=True,
+        )
diff --git a/spack_repo/packages/gettext/test-verify-parallel-make-check.patch b/spack_repo/packages/gettext/test-verify-parallel-make-check.patch
new file mode 100644
index 0000000000000000000000000000000000000000..5f5aebcbfd7dc5a2c94f78732994cf254648c292
--- /dev/null
+++ b/spack_repo/packages/gettext/test-verify-parallel-make-check.patch
@@ -0,0 +1,61 @@
+2017-04-20  Bruno Haible  <bruno@clisp.org>
+
+        verify tests: Fix spurious failure with parallel make.
+        * tests/test-verify.sh: Build test-verify-try.o, not test-verify.o.
+        * tests/test-verify-try.c: New file.
+        Reported by Adam James Stewart <ajstewart@anl.gov>.
+
+diff --git a/gettext-tools/gnulib-tests/test-verify.sh b/gettext-tools/gnulib-tests/test-verify.sh
+index 3e76761..1e75d55 100755
+--- a/gettext-tools/gnulib-tests/test-verify.sh
++++ b/gettext-tools/gnulib-tests/test-verify.sh
+@@ -7,8 +7,9 @@ unset MALLOC_PERTURB_
+ 
+ # Rather than figure out how to invoke the compiler with the right
+ # include path ourselves, we let make do it:
+-(cd "$initial_cwd_" && rm -f test-verify.o \
+-    && $MAKE test-verify.o >/dev/null 2>&1) \
++(cd "$initial_cwd_" \
++ && rm -f test-verify-try.o \
++ && $MAKE test-verify-try.o >/dev/null 2>&1) \
+   || skip_ "cannot compile error-free"
+ 
+ # Now, prove that we encounter all expected compilation failures:
+@@ -16,8 +17,8 @@ unset MALLOC_PERTURB_
+ : >err
+ for i in 1 2 3 4 5; do
+   (cd "$initial_cwd_"
+-   rm -f test-verify.o
+-   $MAKE CFLAGS=-DEXP_FAIL=$i test-verify.o) >>out 2>>err \
++   rm -f test-verify-try.o
++   $MAKE CFLAGS=-DEXP_FAIL=$i test-verify-try.o) >>out 2>>err \
+   && { warn_ "compiler didn't detect verification failure $i"; fail=1; }
+ done
+ 
+diff --git a/gettext-tools/gnulib-tests/test-verify-try.c b/gettext-tools/gnulib-tests/test-verify-try.c
+new file mode 100644
+index 0000000..362fb01
+--- /dev/null
++++ b/tests/test-verify-try.c
+@@ -0,0 +1,21 @@
++/* Test the "verify" module.
++
++   Copyright (C) 2017 Free Software Foundation, Inc.
++
++   This program is free software: you can redistribute it and/or modify
++   it under the terms of the GNU General Public License as published by
++   the Free Software Foundation; either version 3 of the License, or
++   (at your option) any later version.
++
++   This program is distributed in the hope that it will be useful,
++   but WITHOUT ANY WARRANTY; without even the implied warranty of
++   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
++   GNU General Public License for more details.
++
++   You should have received a copy of the GNU General Public License
++   along with this program.  If not, see <http://www.gnu.org/licenses/>.  */
++
++/* This is a separate source file, so that the execution of test-verify.sh
++   does not interfere with the building of the 'test-verify' program.  */
++
++#include "test-verify.c"
diff --git a/spack_repo/packages/icon-nwp/package.py b/spack_repo/packages/icon-nwp/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..e97a858e9c82f407ebf4e74760c9c266b3447377
--- /dev/null
+++ b/spack_repo/packages/icon-nwp/package.py
@@ -0,0 +1,221 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from pathlib import Path
+
+from spack.package import *
+
+
+class IconNwp(Package):
+    """
+    Recipe to build ICON-NWP using the LMU gitlab repository.
+    """
+
+    git = "ssh://git@gitlab.dkrz.de/icon/icon-nwp.git"
+    homepage = "https://code.mpimet.mpg.de/projects/iconpublic"
+    # maintainers("oriol.tinto")
+
+    # Version w2w-B6 points to a different repository and branch
+    version("w2w-B6", git="ssh://git@gitlab.physik.uni-muenchen.de/w2w/icon-w2w.git", branch="icon-w2w/icon-nwp-B6")
+    # FIXME: The ugly configuration system in older icon versions prevents us from using this package with it.
+    # version("2.5.0-nwp3", branch="icon-nwp/op-release-2.5.0-nwp3")
+    version("2.6.4-nwp3", branch="icon-nwp/op-release-2.6.4-nwp3")
+    version("2.6.5-nwp0", branch="icon-nwp/op-release-2.6.5-nwp0")
+    version("master", branch="master")
+    version("psp", branch="icon-nwp/icon-nwp-psp")
+
+    variant("lmu", default=False, description="if git.url and submodules should be patched to use the LMU mirrors")
+
+    # Dependencies
+    depends_on("mpi")
+    depends_on("netcdf-c")
+    depends_on("netcdf-fortran")
+    depends_on("eccodes+fortran")
+    depends_on("libxml2")
+
+    # Openblas? best way of doing it?
+    depends_on("openblas", when="%gcc")
+    depends_on("intel-mkl", when="%intel")
+
+    # Extra dependencies for B6, including yaml, and the hdf5 filters for compression.
+    depends_on("libyaml", when="@w2w-B6")
+    depends_on("hdf5-blosc", when="@w2w-B6")
+    depends_on("h5z-zfp", when="@w2w-B6")
+    depends_on("sz~hdf5", when="@w2w-B6")
+    depends_on("sz3~hdf5", when="@w2w-B6")
+
+    phases = ["configure", "build", "install"]
+
+    def do_fetch(self, mirror_only=False):
+        if "+lmu" in self.spec:
+            self.fetcher[0].url = self.git = "ssh://git@gitlab.physik.uni-muenchen.de/w2w/icon.git"
+        super(IconNwp, self).do_fetch(mirror_only)
+
+    def patch(self):
+        # Run git submodule update
+        git = which("git")
+        if "+lmu" in self.spec:
+            self.patch_submodules()
+        git("submodule", "update", "--init", "--recursive")
+
+    def patch_submodules(self):
+        """
+        Because of the lack of access rights to the original submodule repositories,
+        we patch the gitmodules file to point to a different mirror.
+        """
+        git_submodules_file = Path().cwd() / ".gitmodules"
+
+        git_mirror = "git@gitlab.lrz.de:dkrz-mirror"
+
+        git_modules_patch = f"""
+        [submodule "externals/mtime"]
+                path = externals/mtime
+                url = {git_mirror}/libmtime.git
+        [submodule "externals/jsbach"]
+                path = externals/jsbach
+                url = {git_mirror}/jsbach.git
+        [submodule "externals/yac"]
+                path = externals/yac
+                url = {git_mirror}/YAC.git
+        [submodule "externals/self"]
+                path = externals/self
+                url = {git_mirror}/libself.git
+        [submodule "externals/tixi"]
+                path = externals/tixi
+                url = {git_mirror}/libtixi.git
+        [submodule "externals/yaxt"]
+                path = externals/yaxt
+                url = {git_mirror}/yaxt.git
+        [submodule "externals/rte-rrtmgp"]
+                path = externals/rte-rrtmgp
+                url = https://github.com/earth-system-radiation/rte-rrtmgp.git
+        [submodule "externals/cub"]
+                path = externals/cub
+                url = https://github.com/NVlabs/cub.git
+        [submodule "externals/omni-xmod-pool"]
+                path = externals/omni-xmod-pool
+                url = https://github.com/claw-project/omni-xmod-pool.git
+        [submodule "externals/cdi"]
+                path = externals/cdi
+                url = {git_mirror}/libcdi.git
+        [submodule "externals/sct"]
+                path = externals/sct
+                url = {git_mirror}/sct.git
+        [submodule "externals/ecrad"]
+                path = externals/ecrad
+                url = {git_mirror}/libecrad.git
+        [submodule "externals/dace_icon"]
+                path = externals/dace_icon
+                url = {git_mirror}/dace-icon-interface.git
+        [submodule "externals/emvorado"]
+                path = externals/emvorado
+                url = {git_mirror}/emvorado-for-icon.git
+        [submodule "utils/mkexp"]
+                path = utils/mkexp
+                url = https://git.mpimet.mpg.de/public/mkexp
+        [submodule "externals/art"]
+                path = externals/art
+                url = {git_mirror}/art.git
+        [submodule "externals/ppm"]
+                path = externals/ppm
+                url = https://gitlab.dkrz.de/jahns/ppm.git
+        [submodule "externals/probtest"]
+                path = externals/probtest
+                url = {git_mirror}/cscs-sw_probtest.git
+        """
+
+        # Replace the content of the original file with the patch
+        with git_submodules_file.open("w") as out_f:
+            out_f.write(git_modules_patch)
+
+    def setup_build_environment(self, env):
+        spec = self.spec
+
+        # Some environment variables to set
+        env_variables_to_set = {
+            "CC": spec["mpi"].mpicc,
+            "FC": spec["mpi"].mpifc,
+            "F77": spec["mpi"].mpif77,
+        }
+        for variable, value in env_variables_to_set.items():
+            env.set(variable, value)
+
+    def configure(self, spec, prefix):
+        spec = self.spec
+        print(spec["mpi"].mpifc)
+
+        libs = [
+            f"-L{spec['netcdf-c'].prefix.lib} -lnetcdf ",  # netcdf-c libs
+            f"-L{spec['netcdf-fortran'].prefix.lib} -lnetcdff",  # netcdf-fortran libs
+            f"-L{spec['eccodes'].prefix.lib} -leccodes_f90 -leccodes",
+            f"-L{spec['libxml2'].prefix.lib} -lxml2",  # XML2 libs
+        ]
+
+        if self.spec.satisfies("%gcc"):
+            libs.append("-lopenblas")
+        elif self.spec.satisfies("%intel"):
+            libs.append("-qmkl=sequential")
+
+        if self.spec.version == Version("w2w-B6"):
+            libs.append("-lyaml")
+
+        mtune = "generic"
+        INCLUDES = f"-I{spec['libxml2'].prefix}/include/libxml2"
+        options = [
+            f"CC={spec['mpi'].mpicc}",
+            f"FC={spec['mpi'].mpifc}",
+            f"CFLAGS=-g -mpc64 {INCLUDES}",
+            f"ICON_CFLAGS=-O3 -g -mtune={mtune}",
+            f"ICON_BUNDLED_CFLAGS=-O3 -mtune={mtune}",
+            "FCFLAGS=-std=legacy -fmodule-private -fimplicit-none -fmax-identifier-length=63 -Wall -Wcharacter-truncation -Wconversion -Wunderflow -Wunused-parameter -Wno-surprising -fall-intrinsics -g -mpc64 -w",
+            "ICON_FCFLAGS=-fbacktrace -fbounds-check -fstack-protector-all -finit-real=nan -finit-integer=-2147483648 -finit-character=127 -w -O2",
+            f"ICON_OCEAN_FCFLAGS=-O3 -mtune={mtune}",
+            f"LDFLAGS={' '.join(libs)}",
+            f"LIBS={' '.join(libs)}",
+            f"--prefix={prefix}",
+            f"--enable-grib2",
+        ]
+
+        # For some reason there's a problem with OpenMPI with gcc@11.3.0 which makes the configuration fail.
+        if self.spec.compiler.name == "gcc" and self.spec.compiler.version == Version("11.3.0"):
+            options.append("--enable-mpi-checks=no")
+        configure(*options)
+
+    def build(self, spec, prefix):
+        make()
+
+    def install(self, spec, prefix):
+        make("install")
+
+        # Create extra folders with the data and the docs
+        self.make_extra_folders(prefix)
+
+    def setup_run_environment(self, env):
+        env.set("ICON_BASE_PATH", self.spec.prefix)
+        env.set("ICON_DATA_PATH", self.spec.prefix.join("data"))
+        env.set("ICON_DOCS_PATH", self.spec.prefix.join("doc"))
+
+    def make_extra_folders(self, prefix):
+        mkdir = which("mkdir")
+        rsync = which("rsync")
+        curl = which("curl")
+
+        # copy executables and documentation
+        mkdir("-p", f"{prefix}/data")
+        mkdir("-p", f"{prefix}/doc")
+        mkdir("-p", f"{prefix}/run")
+        rsync("-av", "data/", f"{prefix}/data")
+        rsync("-av", "run/", f"{prefix}/run")
+        rsync("-av", "--include='*.pdf'", "--exclude='*.*'", "doc/", f"{prefix}/doc/")
+        curl(
+            "https://code.mpimet.mpg.de/attachments/download/19568/ICON_tutorial_2019.pdf",
+            "--output",
+            f"{prefix}/doc/ICON_tutorial_2019.pdf",
+        )
+        curl(
+            "http://www.cosmo-model.org/content/model/documentation/core/emvorado_userguide.pdf",
+            "--output",
+            f"{prefix}/doc/emvorado_userguide.pdf",
+        )
diff --git a/spack_repo/packages/sz3/package.py b/spack_repo/packages/sz3/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..b2bcbd3e9b4c15a4a641e974539e08db4723daa3
--- /dev/null
+++ b/spack_repo/packages/sz3/package.py
@@ -0,0 +1,53 @@
+# Copyright 2013-2023 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class Sz3(CMakePackage):
+    """SZ3 is the next generation of the SZ compressor framework"""
+
+    homepage = "https://github.com/szcompressor/SZ3"
+    git = "https://github.com/szcompressor/SZ3"
+
+    # maintainers("disheng222")
+    tags = ["e4s"]
+
+    version("master")
+    version("3.1.7", commit="c49fd17f2d908835c41000c1286c510046c0480e")
+    version("3.1.5.4", commit="4c6ddf628f27d36b28d1bbda02174359cd05573d")
+    version("3.1.5.1", commit="5736a63b917e439dd62248b4ff6234e96726af5d")
+    version("3.1.3.1", commit="323cb17b412d657c4be681b52c34beaf933fe7af")
+    version("3.1.3", commit="695dff8dc326f3b165f6676d810f46add088a585")
+
+    variant("hdf5", default=False, description="enable hdf5 filter support")
+    variant("mdz", default=True, description="build mdz executable")
+
+    depends_on("zstd")
+    depends_on("gsl")
+    depends_on("pkgconfig")
+    depends_on("hdf5", when="+hdf5")
+
+    def setup_run_environment(self, env):
+        if "+hdf5" in self.spec:
+            env.prepend_path("HDF5_PLUGIN_PATH", self.prefix.lib64)
+
+    def cmake_args(self):
+        return [
+            "-DSZ3_USE_BUNDLED_ZSTD=OFF",
+            "-DSZ3_DEBUG_TIMINGS=OFF",
+            self.define_from_variant("BUILD_MDZ", "mdz"),
+            self.define_from_variant("BUILD_H5Z_FILTER", "hdf5"),
+        ]
+
+    def test(self):
+        if self.spec.satisfies("@:3.1.6"):
+            print("smoke tests are only supported on 3.1.7 and later, skipping")
+            return
+
+        self.run_test(self.prefix.bin.sz3_smoke_test, purpose="sz3 works")
+
+        if "+mdz" in self.spec:
+            self.run_test(self.prefix.bin.mdz_smoke_test, purpose="mdz works")
diff --git a/spack_repo/repo.yaml b/spack_repo/repo.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..332f5cbe66363ced0949bf58d6c3c5035027a39c
--- /dev/null
+++ b/spack_repo/repo.yaml
@@ -0,0 +1,2 @@
+repo:
+  namespace: 'autosubmit-icon-repository'
diff --git a/templates/common/build_icon.sh b/templates/common/build_icon.sh
index 29442e138ca163b608b9fd294d2c414a036ada42..2f53a0ada2a5b07207e15d977e6a7135f722d30b 100644
--- a/templates/common/build_icon.sh
+++ b/templates/common/build_icon.sh
@@ -1,10 +1,8 @@
 # Get some variables provided by autosubmit.
 # TODO: What do we do to ensure that these variables are defined in the proj file?
 WORKDIR=%HPCROOTDIR%
-ICON_VERSION=%ICON_VERSION%
-SPACK_URL=%spack.url%
-SPACK_BRANCH=%spack.branch%
-SPACK_COMPILER=%spack.compiler%
+ICON_VERSION=%ICON.VERSION%
+COMPILER=%SPACK.COMPILER%
 
 # If the workdir directory does not exist create it
 if [ ! -d ${WORKDIR} ]; then
@@ -12,44 +10,46 @@ if [ ! -d ${WORKDIR} ]; then
 fi
 
 # Go to the working directory
-cd ${WORKDIR}
+cd ${WORKDIR} || exit
+
 
 . ${WORKDIR}/proj/platforms/common/spack_utils.sh
-load_spack
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
 
-if [ $(
-  spack find icon-nwp@${ICON_VERSION} &>/dev/null
-  echo $?
-) -ne 0 ]; then
-  echo "Installing icon-nwp@${ICON_VERSION}."
+if [ ! $(rpm -qa | grep bzip2) ]; then
+  spack install --reuse bzip2
+  spack load --first bzip2
+fi
 
-  if [[ $(spack compiler info ${SPACK_COMPILER}) ]]; then
-    echo "Found Compiler"
+SPACK_BUILD_ICON="%ICON.BUILD_CMD%"
+if [ ! -z "$SPACK_BUILD_ICON" ]; then
+  echo "Installing ICON with spack!"
+  echo "cmd=$SPACK_BUILD_ICON"
+  # In case the autosubmit repository with the icon-nwp recipe doesn't exist, add it
+  if [[ $(spack repo list | grep "${WORKDIR}/proj/spack_repo") ]]; then
+    echo "icon spack repo was already added to repo list"
   else
-    echo "could not find compiler, try to install it... this may take a while"
-    spack add ${SPACK_COMPILER}
-    spack install
-    spack compiler add $(spack location --install-dir $SPACK_COMPILER)
+    spack repo add ${WORKDIR}/proj/spack_repo
   fi
-
-  spack add ucx%${SPACK_COMPILER}+dc+dm+ib_hw_tm+mlx5_dv+rc+rdmacm+thread_multiple+ud+verbs~cuda
-  spack add openmpi%${SPACK_COMPILER}+pmi+legacylaunchers~cuda schedulers=slurm fabrics=ucx
-  spack add icon-nwp@${ICON_VERSION}%${SPACK_COMPILER} ^openmpi%${SPACK_COMPILER}
-  spack install
+  spack spec $SPACK_BUILD_ICON
+  spack install --reuse $SPACK_BUILD_ICON
+  # TODO: had some problems with spack load when more than one version is available; adding --first to overcome that,
+  # although in principle we should not install the model if it's already installed.
+  spack load --first "icon-nwp@${ICON_VERSION}%${COMPILER}"
 else
-  echo "icon-nwp@${ICON_VERSION} already installed!"
+  echo "\%icon.build_cmd\% is not defined. If you want to compile icon with spack, please provide a spack compile instruction string in your build.yml"
 fi
 
 # Need to get ECCODES DWD definitions:
-eccodes_version=$(spack find eccodes | grep eccodes@ | cut -d "@" -f 2)
+eccodes_version=$(spack spec icon-nwp@${ICON_VERSION}%${COMPILER} | grep eccodes | grep -o "@.*%" | grep -o "[0-9\.]*")
 
 definitions_tar_file=eccodes_definitions.edzw-${eccodes_version}-1.tar.bz2
-if [ ! -f ${definitions_tar_file} ]; then
+if [ ! -f "${definitions_tar_file}" ]; then
   defs_url=https://opendata.dwd.de/weather/lib/grib/${definitions_tar_file}
-  wget ${defs_url}
+  wget "${defs_url}"
 
   # Decompress definitions file
-  tar -xf ${definitions_tar_file}
+  tar -xf "${definitions_tar_file}"
   # Create a file containing the environment variable that needs to be set in order to use DWD's definitions:
   echo "export ECCODES_DEFINITION_PATH=${WORKDIR}/definitions.edzw-${eccodes_version}-1" >eccodes_defs.env
 fi
diff --git a/templates/common/build_python_environment.sh b/templates/common/build_python_environment.sh
index 6388ef7bddafa8d48c1cf6a3198f0c130c3d8393..102f9008c686c63814091696edbcf8f409323fee 100644
--- a/templates/common/build_python_environment.sh
+++ b/templates/common/build_python_environment.sh
@@ -2,41 +2,29 @@
 # TODO: What do we do to ensure that these variables are defined in the proj file?
 WORKDIR=%HPCROOTDIR%
 ICON_VERSION=%ICON_VERSION%
-SPACK_URL=%spack.url%
-SPACK_BRANCH=%spack.branch%
-SPACK_COMPILER=%spack.compiler%
-
-# If the workdir directory does not exist create it
-if [ ! -d ${WORKDIR} ]; then
-  mkdir -p ${WORKDIR}
-fi
+SPACK_URL=%SPACK.URL%
+SPACK_BRANCH=%SPACK.BRANCH%
+SPACK_COMPILER=%SPACK.COMPILER%
 
+PYTHON_VERSION=%PYTHON_ENVIRONMENT.PYTHON_VERSION%
 # Go to the working directory
 cd ${WORKDIR}
 
-# Check if experiment's spack installation already exists, if it doesn't, clone it.
-SPACK_ENV=spack/share/spack/setup-env.sh
-if [ ! -f ${SPACK_ENV} ]; then
-  echo "Spack folder not found!"
-  exit 1
-fi
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
 
-# Setup the spack environment
-source ${SPACK_ENV}
 
 # Use spack to get a recent enough version of python3
-if [ $( spack find python@3.8: &>/dev/null
-  echo $?
-) -ne 0 ]; then
+if [ $(spack find python@${PYTHON_VERSION}: &>/dev/null ) ]; then
+  echo "python@${PYTHON_VERSION} already installed!"
+else
   echo "Installing a version of python3"
   # Compile openmpi with schedulers=slurm
-  spack install python@3.8:
-else
-  echo "python@3.8: already installed!"
+  spack install python@${PYTHON_VERSION}
 fi
 
 # Load the python module
-spack load python@3.8:
+spack load --first python@${PYTHON_VERSION}
 
 PYTHON_ENVIRONMENT_FOLDER=%python_environment.folder_name%
 
@@ -56,9 +44,16 @@ ln -sf $(which python3) ${WORKDIR}/python3
 requirements="%python_environment.requirements%"
 
 # Convert list with python format to a bash array
-requirements=($( echo ${requirements} | sed "s/'//g" | tr -d '[],'))
+requirements=($(echo ${requirements} | sed "s/'//g" | tr -d '[],'))
+
+# TODO: Shouldn't be necessary, but kept for now to work around duplicated entries in the requirements list.
+# Use sort and uniq to get the unique elements
+unique_requirements=($(printf "%s\n" "${requirements[@]}" | sort -u))
+
+# Print the unique elements
+echo "${unique_requirements[@]}"
 
 # Install requirements.
-for requirement in ${requirements[@]} ; do
+for requirement in "${unique_requirements[@]}"; do
   python -m pip install ${requirement}
 done
diff --git a/templates/common/compress.py b/templates/common/compress.py
index 996370c2f14f7443200b88b8ae04f825d0603893..408df0590fb89c25c4eac80f5216b840345b7efe 100644
--- a/templates/common/compress.py
+++ b/templates/common/compress.py
@@ -2,6 +2,7 @@ import glob
 import logging
 
 # Set logging level to info.
+logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger("compress")
 logger.setLevel(logging.INFO)
 
@@ -17,7 +18,7 @@ def compress_outputs():
     WORKDIR = Path("%HPCROOTDIR%")
     STARTDATE = "%SDATE%"
     MEMBER = "%MEMBER%"
-    output_file_names = "%simulation.output_file_namesº%"
+    output_file_names = "%SIMULATION.OUTPUT_FILE_NAMES%"
 
     # Define rundir
     RUNDIR = WORKDIR / STARTDATE / MEMBER
@@ -25,11 +26,14 @@ def compress_outputs():
     # Get a list of file names:
     output_file_names = [RUNDIR / f for f in output_file_names.split(" ") if f.strip()]
 
+    logger.info(f"File patterns: {output_file_names}")
     output_files = []
     for file_pattern in output_file_names:
         output_files.extend(glob.glob(file_pattern.as_posix()))
 
+
     output_files = [Path(f) for f in output_files]
+    logger.info(f"Output files: {output_files}")
 
     if not output_files:
         logger.warning("The list of files is empty!")
diff --git a/templates/common/run_icon.sh b/templates/common/run_icon.sh
index 1a5fec30d901590072f9aa3135292dba299a6bfd..a9075818bfa3ab9683ff65ba3b687268b1d98f6a 100644
--- a/templates/common/run_icon.sh
+++ b/templates/common/run_icon.sh
@@ -12,10 +12,11 @@ RUNDIR=${WORKDIR}/${STARTDATE}/${MEMBER}
 cd ${RUNDIR}
 
 # Activate spack
-SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
-source ${SPACK_ENV}
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
+
 # Load icon module
-spack load icon-nwp@%ICON_VERSION%
+spack load --first icon-nwp@%ICON_VERSION%
 
 # Set environment variable for eccodes-dwd definitions:
 source ${WORKDIR}/eccodes_defs.env
diff --git a/templates/common/transfer.sh b/templates/common/transfer.sh
index 795feea3e7a497d1b9f495cc05e93cc6c7d5f6ac..abf80971e85ae703fc1d5b8d85306f8674e44ae8 100644
--- a/templates/common/transfer.sh
+++ b/templates/common/transfer.sh
@@ -4,7 +4,7 @@
 WORKDIR=%HPCROOTDIR%
 STARTDATE=%SDATE%
 MEMBER=%MEMBER%
-OUTPUT_FILES="%simulation.OUTPUT_FILES%"
+OUTPUT_FILES="%SIMULATION.OUTPUT_FILE_NAMES%"
 HPCUSER=%HPCUSER%
 HPCHOST=%HPCHOST%
 
diff --git a/templates/real-from-dwd-ana/prepare_date.sh b/templates/real-from-dwd-ana/prepare_date.sh
deleted file mode 100644
index 4152601c3603607a80529ea090ab4529d7e4b1aa..0000000000000000000000000000000000000000
--- a/templates/real-from-dwd-ana/prepare_date.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash -l
-
-# Get some variables provided by autosubmit.
-WORKDIR=%HPCROOTDIR%
-STARTDATE=%SDATE%
-
-# Define date directory, create it and go there
-COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
-# Create member folder and go there
-mkdir -p ${COMMON_DATE_FOLDER}
-cd ${COMMON_DATE_FOLDER} || exit
-
-# some settings
-AN_MEMBER=$(printf "%03d" %initial_conditions.member%)
-INITIAL_CONDITIONS_PARENT_FOLDER=%initial_conditions.parent_folder%
-
-INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
-
-AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
-FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
-
-if [ ! -f "${AN_SOURCE}" ]; then
-  echo "Analysis file for date ${STARTDATE} not found!"
-  exit 1
-fi
-
-if [ ! -f "${FG_SOURCE}" ]; then
-  echo "FG file for date ${STARTDATE} not found!"
-  exit 1
-fi
-
-AN_FILE=$(basename "${AN_SOURCE}")
-FG_FILE=$(basename "${FG_SOURCE}")
-
-# Save filenames to be used later by other scripts.
-echo "${AN_FILE}" > an_file.txt
-echo "${FG_FILE}" > fg_file.txt
-
-# Copy the first-guess and analysis files.
-cp "${FG_SOURCE}" "${FG_FILE}"
-cp "${AN_SOURCE}" "${AN_FILE}"
-
-# Change permissions to read only.
-chmod 440 ./*
diff --git a/templates/real-from-dwd-ana/prepare_date_local.sh b/templates/real-from-dwd-ana/prepare_date_local.sh
new file mode 100644
index 0000000000000000000000000000000000000000..ecbaa23ebc59a23072bd9707e40c80e4efd77bfc
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_date_local.sh
@@ -0,0 +1,74 @@
+#!/bin/bash -l
+
+IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
+
+if [ "${IS_LOCAL}" == "True" ]; then
+  # Get some variables provided by autosubmit.
+  WORKDIR=%HPCROOTDIR%
+  STARTDATE=%SDATE%
+  HPCUSER=%HPCUSER%
+  HPCHOST=%HPCHOST%
+
+  # Define date directory, create it and go there
+  COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+  AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
+  INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
+  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+
+  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
+  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
+
+  AN_FILE=$(basename "${AN_SOURCE}")
+  FG_FILE=$(basename "${FG_SOURCE}")
+
+  # Find files
+  if [ ! -f "${AN_SOURCE}" ]; then
+    echo "Analysis file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+  if [ ! -f "${FG_SOURCE}" ]; then
+    echo "FG file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+
+  # Check if we copy the initial conditions from the local system or the remote one
+  if [ "${IS_LOCAL}" == "True" ]; then
+    # Create member folder
+    ssh "${HPCUSER}@${HPCHOST}" mkdir -p ${COMMON_DATE_FOLDER}
+
+    # Save filenames to be used later by other scripts.
+    echo "${AN_FILE}" > an_file.txt
+    echo "${FG_FILE}" > fg_file.txt
+    rsync -v an_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/an_file.txt"
+    rsync -v fg_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/fg_file.txt"
+
+    # Remove temporary files.
+    rm an_file.txt
+    rm fg_file.txt
+
+    # Copy the first-guess and analysis files.
+    rsync -v "${FG_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${FG_FILE}"
+    rsync -v "${AN_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${AN_FILE}"
+
+    # Change permissions to read only.
+    ssh "${HPCUSER}@${HPCHOST}" chmod 440 "${COMMON_DATE_FOLDER}/*"
+  else
+    # Create member folder and go there
+    mkdir -p ${COMMON_DATE_FOLDER}
+    cd ${COMMON_DATE_FOLDER} || exit
+
+    # Save filenames to be used later by other scripts.
+    echo "${AN_FILE}" > an_file.txt
+    echo "${FG_FILE}" > fg_file.txt
+
+    # Copy the first-guess and analysis files.
+    cp "${FG_SOURCE}" "${FG_FILE}"
+    cp "${AN_SOURCE}" "${AN_FILE}"
+
+    # Change permissions to read only.
+    chmod 440 ./*
+  fi
+fi
\ No newline at end of file
diff --git a/templates/real-from-dwd-ana/prepare_date_remote.sh b/templates/real-from-dwd-ana/prepare_date_remote.sh
new file mode 100644
index 0000000000000000000000000000000000000000..8fec0cec09abbd5b6487628a67c43161abe78515
--- /dev/null
+++ b/templates/real-from-dwd-ana/prepare_date_remote.sh
@@ -0,0 +1,73 @@
+#!/bin/bash -l
+
+IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
+
+if [ "${IS_LOCAL}" == "0" ]; then
+  # Get some variables provided by autosubmit.
+  WORKDIR=%HPCROOTDIR%
+  STARTDATE=%SDATE%
+  HPCUSER=%HPCUSER%
+  HPCHOST=%HPCHOST%
+  # Define date directory, create it and go there
+  COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+  AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
+  INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
+  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+
+  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
+  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
+
+  AN_FILE=$(basename "${AN_SOURCE}")
+  FG_FILE=$(basename "${FG_SOURCE}")
+
+  # Find files
+  if [ ! -f "${AN_SOURCE}" ]; then
+    echo "Analysis file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+  if [ ! -f "${FG_SOURCE}" ]; then
+    echo "FG file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+
+  # Check if we copy the initial conditions from the local system or the remote one
+  if [ "${IS_LOCAL}" == "True" ]; then
+    # Create member folder
+    ssh "${HPCUSER}@${HPCHOST}" mkdir -p ${COMMON_DATE_FOLDER}
+
+    # Save filenames to be used later by other scripts.
+    echo "${AN_FILE}" > an_file.txt
+    echo "${FG_FILE}" > fg_file.txt
+    rsync -v an_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/an_file.txt"
+    rsync -v fg_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/fg_file.txt"
+
+    # Remove temporary files.
+    rm an_file.txt
+    rm fg_file.txt
+
+    # Copy the first-guess and analysis files.
+    rsync -v "${FG_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${FG_FILE}"
+    rsync -v "${AN_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${AN_FILE}"
+
+    # Change permissions to read only.
+    ssh "${HPCUSER}@${HPCHOST}" chmod 440 "${COMMON_DATE_FOLDER}/*"
+  else
+    # Create member folder and go there
+    mkdir -p ${COMMON_DATE_FOLDER}
+    cd ${COMMON_DATE_FOLDER} || exit
+
+    # Save filenames to be used later by other scripts.
+    echo "${AN_FILE}" > an_file.txt
+    echo "${FG_FILE}" > fg_file.txt
+
+    # Copy the first-guess and analysis files.
+    cp "${FG_SOURCE}" "${FG_FILE}"
+    cp "${AN_SOURCE}" "${AN_FILE}"
+
+    # Change permissions to read only.
+    chmod 440 ./*
+  fi
+fi
\ No newline at end of file
diff --git a/templates/real-from-dwd-ana/prepare_experiment.sh b/templates/real-from-dwd-ana/prepare_experiment.sh
index 5d9b03dcf54d7cb2c3e32780033d78d6ca64d265..c1c1b4401e89ff0d94a5708fdd8093b913b49f72 100644
--- a/templates/real-from-dwd-ana/prepare_experiment.sh
+++ b/templates/real-from-dwd-ana/prepare_experiment.sh
@@ -8,11 +8,11 @@ EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
 
 
 # Activate spack
-SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
-source ${SPACK_ENV}
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
 
 # Load icon module needed to retrieve some data
-spack load icon-nwp@%ICON_VERSION%
+spack load --first icon-nwp@%ICON_VERSION%
 
 # Create a folder for the common inidata and go there
 COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
diff --git a/templates/real-from-dwd-ana/prepare_chunk.py b/templates/real-from-dwd-ana/prepare_namelist.py
similarity index 100%
rename from templates/real-from-dwd-ana/prepare_chunk.py
rename to templates/real-from-dwd-ana/prepare_namelist.py
diff --git a/templates/real-from-ideal/prepare_experiment.sh b/templates/real-from-ideal/prepare_experiment.sh
index b25b40a9fa6eeca08222eb7fb8e40f15c7bf2901..7458bf96668ff85d190cea43d44add67ee26f878 100644
--- a/templates/real-from-ideal/prepare_experiment.sh
+++ b/templates/real-from-ideal/prepare_experiment.sh
@@ -6,11 +6,11 @@ DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
 RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
 
 # Activate spack
-SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
-source ${SPACK_ENV}
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
 
 # Load icon module needed to retrieve some data
-spack load icon-nwp@%ICON_VERSION%
+spack load --first icon-nwp@%ICON_VERSION%
 
 # Create a folder for the common inidata and go there
 COMMON_INIDATA_FOLDER=${WORKDIR}/inidata
diff --git a/templates/real-from-ideal/prepare_chunk.py b/templates/real-from-ideal/prepare_namelist.py
similarity index 100%
rename from templates/real-from-ideal/prepare_chunk.py
rename to templates/real-from-ideal/prepare_namelist.py
diff --git a/templates/real-from-ideal/run_ideal.sh b/templates/real-from-ideal/run_ideal.sh
index cec0251755487445e1b3a510023a5856d4912e88..e9959aa2bf6a5e727a1dd259486f302884b94eaf 100644
--- a/templates/real-from-ideal/run_ideal.sh
+++ b/templates/real-from-ideal/run_ideal.sh
@@ -11,10 +11,11 @@ RUNDIR=${WORKDIR}/${STARTDATE}/ideal
 cd ${RUNDIR}
 
 # Activate spack
-SPACK_ENV=${WORKDIR}/spack/share/spack/setup-env.sh
-source ${SPACK_ENV}
+. ${WORKDIR}/proj/platforms/common/spack_utils.sh
+load_spack "%spack.init%" "%spack.root%" "%spack.url%" "%spack.branch%" "%spack.externals%" "%spack.compiler%" "%spack.disable_local_config%" "%spack.user_cache_path%" "%spack.user_config_path%"
+
 # Load icon module
-spack load icon-nwp@%ICON_VERSION%
+spack load --first icon-nwp@%ICON_VERSION%
 
 # Set environment variable for eccodes-dwd definitions:
 source ${WORKDIR}/eccodes_defs.env