diff --git a/conf/real-from-d2-ana/jobs.yml b/conf/real-from-d2-ana/jobs.yml
index 42982da50dcd50bbb954686143509c883e12d562..cacd23f9b305e54a15ccc2009dda54698c5ef8ce 100644
--- a/conf/real-from-d2-ana/jobs.yml
+++ b/conf/real-from-d2-ana/jobs.yml
@@ -8,6 +8,11 @@ JOBS:
     WALLCLOCK: 01:00
     PLATFORM: LOCAL
 
+  PREPARE_LBC_REMOTE:
+    FILE: templates/real-from-d2-ana/prepare_lbc_remote.sh
+    RUNNING: date
+    WALLCLOCK: 03:00
+
   PREPARE_DATE_REMOTE:
     FILE: templates/real-from-d2-ana/prepare_date_remote.sh
     RUNNING: date
@@ -15,7 +20,7 @@ JOBS:
 
   PREPARE_MEMBER:
     FILE: templates/real-from-d2-ana/prepare_member.sh
-    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE_REMOTE PREPARE_DATE_LOCAL
+    DEPENDENCIES: PREPARE_EXPERIMENT PREPARE_DATE_REMOTE PREPARE_DATE_LOCAL PREPARE_LBC_REMOTE
 
   PREPARE_NAMELIST:
     FILE: templates/real-from-d2-ana/prepare_namelist.py
\ No newline at end of file
diff --git a/conf/real-from-d2-ana/simulation.yml b/conf/real-from-d2-ana/simulation.yml
index 38fb5c13e8de166a6e320e656c6799bc1b3e7b1b..ec716d25f67589e57bf52f9583372eedc3c6a075 100644
--- a/conf/real-from-d2-ana/simulation.yml
+++ b/conf/real-from-d2-ana/simulation.yml
@@ -3,6 +3,8 @@ simulation:
   radiation_grid_filename: icon_grid_0046_R19B06_LR.nc
   external_parameters_filename: icon_extpar_0047_R19B07_L_20220601_tiles.nc
   lateral_boundary_grid_filename: icon_grid_0047_R19B07_L_lbc.nc
+  parent_grid_filename: icon_grid_0028_R02B07_N02.nc
+
   date_format: '%Y-%m-%dT%H:%M:%SZ'
   namelist_paths:
     # Path to the namelists
@@ -22,6 +24,6 @@ simulation:
 
   boundary_conditions:
     # Where are we getting our initial data from?
-    local: true
+    local: false
     parent_folder: /archive/meteo/w2w-p2/B3/ICON-D2_oper_ICLBC/
     member: 1
diff --git a/templates/real-from-d2-ana/icon-remap-helper.py b/templates/real-from-d2-ana/icon-remap-helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..af374b1965c34fc26925c4a69ee5143a85ce2458
--- /dev/null
+++ b/templates/real-from-d2-ana/icon-remap-helper.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python3
+"""
+Automatically create namelist files for ICON-grid to ICON-grid remapping and run iconremap.
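+
+Example call (illustrative file names, following the grids used in this experiment):
+    icon-remap-helper.py --src-grid icon_grid_0028_R02B07_N02.nc \
+        --dst-grid icon_grid_0047_R19B07_L_lbc.nc \
+        --source iefff00030000.m001 --dest out --output-format grb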
+"""
+import argparse
+from enstools.io import read, write
+from enstools.interpolation import nearest_neighbour
+from subprocess import run
+import logging
+import os
+import numpy as np
+import xarray
+
+
+def load_vgrid(grid_file):
+    """
+    read HHL (half level heights) from an input file and also calculate the full level heights
+    """
+    logging.info(f"Reading input file with vertical grid information {grid_file}...")
+    data = read(grid_file)
+    if not "HHL" in data:
+        logging.error(f"HHL not found in {grid_file}")
+        exit(-1)
+    
+    # store both result arrays in one dataset without time dimension
+    result = xarray.Dataset()
+    result["HHL"] = data["HHL"][0,...].compute()
+
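+    # full level heights (FHL) are the arithmetic mean of the two adjacent half level heights (HHL)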
+    FHL = xarray.DataArray(np.empty((result["HHL"].shape[0]-1, result["HHL"].shape[1])), name="FHL", dims=("generalVertical2", "cell"))
+    for layer in range(FHL.shape[0]):
+        FHL[layer, ...] = (result["HHL"][layer, ...] + result["HHL"][layer + 1, ...]) / 2
+    result["FHL"] = FHL
+    return result
+
+
+def vertical_interpolation_one_variable(src_hl, dst_hl, values):
+    """
+    perform the interpolation using numpy.interp on one variable
+    """
+    # perform the interpolation gridpointwise
+    result = np.empty((values.shape[0], dst_hl.shape[0], values.shape[2]))
+    for time in range(values.shape[0]):
+        for cell in range(values.shape[2]):
+            # all the flipping is necessary because np.interp expects increasing coordinate values
+            result[time, :, cell] = np.flip(np.interp(np.flip(dst_hl[:,cell], 0), np.flip(src_hl[:,cell], 0), np.flip(values.values[time, :, cell], 0)), 0)
+
+    # create the new xarray DataArray
+    new_array = xarray.DataArray(result, dims=values.dims, name=values.name, attrs=values.attrs)
+    return new_array
+
+
+def vertical_interpolation(src_vgrid, dst_vgrid, input_name, output_name):
+    """
+    perform vertical interpolation
+    """
+    logging.info("starting vertical interpolation...")
+    # read source and destination grids
+    src_vgrid_hl = load_vgrid(src_vgrid)
+    dst_vgrid_hl = load_vgrid(dst_vgrid)
+    src_hhl_dim = src_vgrid_hl["HHL"].shape[0]
+    src_fhl_dim = src_vgrid_hl["FHL"].shape[0]
+    dst_hhl_dim = dst_vgrid_hl["HHL"].shape[0]
+    dst_fhl_dim = dst_vgrid_hl["FHL"].shape[0]
+
+    # read input file
+    infile = read(input_name).compute()
+
+    # create output file
+    outfile = xarray.Dataset()
+
+    # loop over all variables of the input file
+    for var in infile.variables:
+        # VN is special: it is defined on the edges of the grid, so find the nearest height coordinates on the edges
+        if var == "VN" and infile[var].shape[1] == src_fhl_dim:
+            logging.info(f"    -> interpolating {var} onto FHL")
+            logging.info("        -> interpolating of height array to the edges")
+            fint = nearest_neighbour(infile["clon"], infile["clat"], infile["elon"], infile["elat"], src_grid="unstructured", dst_grid="unstructured", npoints=2, method="mean")
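+            # fint averages the two nearest cell values onto each edge point; apply it to both height fields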
+            src_vgrid_fhl_vn = fint(src_vgrid_hl["FHL"])
+            dst_vgrid_fhl_vn = fint(dst_vgrid_hl["FHL"])
+            outfile[var] = vertical_interpolation_one_variable(src_vgrid_fhl_vn.values, dst_vgrid_fhl_vn.values, infile[var])
+        elif not var.startswith("height") and len(infile[var].shape) > 1 and infile[var].shape[1] == src_hhl_dim:
+            logging.info(f"    -> interpolating {var} onto HHL")
+            outfile[var] = vertical_interpolation_one_variable(src_vgrid_hl["HHL"].values, dst_vgrid_hl["HHL"].values, infile[var])
+            continue
+        elif not var.startswith("height") and len(infile[var].shape) > 1 and infile[var].shape[1] == src_fhl_dim:
+            logging.info(f"    -> interpolating {var} onto FHL")
+            outfile[var] = vertical_interpolation_one_variable(src_vgrid_hl["FHL"].values, dst_vgrid_hl["FHL"].values, infile[var])
+            continue
+        else:
+            if var.startswith("height") and infile[var].shape[0] == src_hhl_dim:
+                if len(infile[var].shape) == 2:
+                    continue            
+                logging.info(f"    -> replacing old height coordinate '{var}'")
+                outfile[var] = xarray.DataArray(np.arange(1, dst_hhl_dim + 1, 1) + 0.5, name=var, dims=infile[var].dims, attrs=infile[var].attrs)
+                if var + "_bnd" in infile:
+                    bnds = xarray.DataArray(np.empty((dst_hhl_dim, 2)), name=var+"_bnds", dims=infile[var+"_bnds"].dims, attrs=infile[var+"_bnds"].attrs)
+                    bnds[dst_hhl_dim, 0] = outfile[var].values - 0.5
+                    bnds[dst_hhl_dim, 1] = outfile[var].values + 0.5
+                    outfile[var+"_bnds"] = bnds
+            elif var.startswith("height") and infile[var].shape[0] == src_fhl_dim:                
+                if len(infile[var].shape) == 2:
+                    continue            
+                logging.info(f"    -> replacing old height coordinate '{var}'")
+                outfile[var] = xarray.DataArray(np.arange(1, dst_fhl_dim + 1, 1) + 0.5, name=var, dims=infile[var].dims, attrs=infile[var].attrs)
+                if var + "_bnd" in infile:
+                    bnds = xarray.DataArray(np.empty((dst_fhl_dim, 2)), name=var+"_bnds", dims=infile[var+"_bnds"].dims, attrs=infile[var+"_bnds"].attrs)
+                    bnds[:, 0] = outfile[var].values - 0.5
+                    bnds[:, 1] = outfile[var].values + 0.5
+                    outfile[var+"_bnds"] = bnds
+            else:
+                logging.info(f"    -> storing {var} without interpolation")
+                if var in infile.coords:
+                    outfile.coords[var] = infile[var]
+                else:
+                    outfile[var] = infile[var]
+    
+    # store the result
+    logging.info(f"writing file {output_name}")
+    outfile.attrs = infile.attrs
+    outfile.to_netcdf(output_name, engine="scipy")
+
+
+def remap_one_file(in_grid, out_grid, one_file, dst_folder, rename=None, src_vgrid=None, dst_vgrid=None):
+    """
+    write the remapping namelist and run iconremap
+
+    Parameters
+    ----------
+    in_grid : source grid file
+    out_grid : destination grid file
+    one_file : data file to remap
+    dst_folder : destination folder for the remapped file
+    rename : optional new file name with ICON-style placeholders (<y>, <m>, <d>, <h>)
+    src_vgrid : optional source vertical grid (HHL) used for vertical interpolation
+    dst_vgrid : optional destination vertical grid (HHL) used for vertical interpolation
+    """
+    # read the file content to get a list of all variables
+    content = read(one_file)
+    all_vars = list(content.data_vars)
+    remap_vars = []
+    for var in all_vars:
+        if not "bnds" in var and not '_vertices' in var and not 'lat' in var and not 'lon' in var:
+            remap_vars.append(var)
+
+    # make sure that destination folder exists
+    if not os.path.exists(dst_folder):
+        os.makedirs(dst_folder)
+
+    # is vertical remapping requested?
+    if src_vgrid is not None and dst_vgrid is not None:
+        vinp = True
+        if args.output_format != "nc":
+            logging.error("vertical regridding is only supported for netcdf output!")
+            exit(-1)
+    else:
+        vinp = False
+
+    # rename the file if requested
+    if rename is not None:
+        # read the time stamp
+        if content["time"].size != 1:
+            logging.error("more then one timestep, unable to rename the file!")
+            exit(-1)
+        if content["time"].attrs["units"] == "day as %Y%m%d.%f":
+            date_part = int(content["time"][0])
+            time_part = float(content["time"][0]) - date_part
+            year = str(date_part)[0:4]
+            month = str(date_part)[4:6]
+            day = str(date_part)[6:8]
+            hour = "%02d" % round(time_part * 24)
+        else:
+            logging.error("unsupported timeformat!")
+            exit(-1)
+        # replace ICON-style placeholders:
+        rename = rename.replace("<y>", year)
+        rename = rename.replace("<m>", month)
+        rename = rename.replace("<d>", day)
+        rename = rename.replace("<h>", hour)
+        rename = os.path.join(dst_folder, rename)
+    else:
+        rename = os.path.join(dst_folder, (os.path.basename(one_file)))
+
+    # output grib or netcdf
+    filename, ext = os.path.splitext(rename)
+    if args.output_format == "grb":
+        ext = ".grb"
+    elif args.output_format == "nc":
+        ext = ".nc"
+    if ext in ["grib", "grb", "grib2", "grb2"]:
+        out_filetype = 2
+    else:
+        out_filetype = 4
+    rename = filename + ext
+
+    # create namelist for the input file
+    namelist = f"""
+    &remap_nml
+        in_grid_filename   = '{os.path.abspath(in_grid)}'
+        in_filename        = '{os.path.abspath(one_file)}'
+        in_type            = 2
+        out_grid_filename  = '{os.path.abspath(out_grid)}'
+        out_filename       = '{os.path.abspath(rename)}'
+        out_type           = 2
+        out_filetype       = {out_filetype}
+    /
+    """
+    # add all variables
+    for var in remap_vars:
+        # VN is defined on the edges and has a different number of points; it is not skipped here,
+        # the vertical interpolation step handles it separately on the edge grid.
+        if vinp and var == "VN":
+            logging.warning("VN is remapped despite the requested vertical interpolation. Make sure you have U and V!")
+            #continue
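+        # integer / categorical fields such as SOILTYP need a different interpolation method than continuous fields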
+        if "soiltyp" in var.lower() or content[var].dtype in [np.int32, np.int64]:
+            intp_method = 4
+        else:
+            intp_method = 3
+        namelist += f"""
+        &input_field_nml
+            inputname      = "{var}"
+            outputname     = "{var}"
+            intp_method    = {intp_method}
+        /
+        """
+    nml_file = os.path.abspath(os.path.join(dst_folder, (os.path.basename(one_file))) + ".namelist")
+    with open(nml_file, "w") as nml:
+        nml.write(namelist+"\n")
+
+    # run the remapping tool
+    p = run(["iconremap", "-vvv", "--remap_nml", nml_file], cwd=dst_fodler)
+    if p.returncode != 0:
+        logging.error(f"remapping of {one_file} failed")
+        exit(-1)
+
+    # perform vertical regridding using numpy and enstools
+    if vinp:
+        # move the horizontally remapped file to a temporary name
+        tmpname = filename + ".no-vinp" + ext
+        vinp_filename = filename + ".vinp" + ext
+        os.rename(rename, tmpname)
+
+        # perform the actual vertical interpolation
+        vertical_interpolation(src_vgrid, dst_vgrid, tmpname, vinp_filename)
+
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser(description=__doc__)
+    parser.add_argument("--src-grid", required=True, help="source grid file")
+    parser.add_argument("--src-vgrid", required=False, help="optional: HHL from the source grid. Used for vertical interpolation.")
+    parser.add_argument("--dst-grid", required=True, help="destination grid file")
+    parser.add_argument("--dst-vgrid", required=False, help="optional: HHL for the destination grid. Used for vertical interpolation.")
+    parser.add_argument("--source", nargs="+", required=True, help="source data file(s)")
+    parser.add_argument("--dest", required=True, help="destination folder")
+    parser.add_argument("--output-format", choices=["input", "nc", "grb"], default="input", help="select type of output: input=same as input; nc=netcdf, grb=grib")
+    parser.add_argument("--rename", required=False, help="change the filename. Example: 'latbc_DOM01_ML_<y>-<m>-<d>T<h>.nc'")
+    args = parser.parse_args()
+
+    # if a vertical source grid is given, first interpolate it horizontally onto the destination grid
+    if args.src_vgrid is not None:
+        src_vgrid_name, ext = os.path.splitext(os.path.basename(args.src_vgrid))
+        if ext != ".grb":
+            logging.error("--src-vgrid is expected to be a grib file with extension .grb!")
+            exit(-1)
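+        # remap HHL of the source vertical grid horizontally onto the destination grid, so that the
+        # vertical interpolation below can work cell by cell on the destination horizontal grid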
+        remap_one_file(args.src_grid, args.dst_grid, args.src_vgrid, args.dest)
+        src_vgrid_name1 = os.path.join(args.dest, src_vgrid_name + ".nc")
+        src_vgrid_name2 = os.path.join(args.dest, src_vgrid_name + ".hinp.nc")
+        os.rename(src_vgrid_name1, src_vgrid_name2)
+        args.src_vgrid = src_vgrid_name2
+
+    # loop over all source files
+    for one_file in args.source:
+        remap_one_file(args.src_grid, args.dst_grid, one_file, args.dest, args.rename, args.src_vgrid, args.dst_vgrid)
diff --git a/templates/real-from-d2-ana/prepare_date_local.sh b/templates/real-from-d2-ana/prepare_date_local.sh
index 0f579e810d440f76cdfc54973254b6fa6f8f82ef..16ec5f75d3e707f320ad533a576850525952c711 100644
--- a/templates/real-from-d2-ana/prepare_date_local.sh
+++ b/templates/real-from-d2-ana/prepare_date_local.sh
@@ -5,7 +5,7 @@
 
 # Because it can happen that the initial conditions as well and the execution happens in the local
 # system we need to define these two variables:
-DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
+IC_DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
 
 if [ "x%HPCARCH%" == "xlocal" ]; then
   RUN_MACHINE_IS_LOCAL="True"
@@ -15,7 +15,7 @@ fi
 
 
 
-if [ "${DATA_IS_LOCAL}" == "True" ]; then
+if [ "${IC_DATA_IS_LOCAL}" == "True" ]; then
   # Get some variables provided by autosubmit.
   WORKDIR=%HPCROOTDIR%
   STARTDATE=%SDATE%
@@ -27,12 +27,14 @@ if [ "${DATA_IS_LOCAL}" == "True" ]; then
 
   AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
   INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
-  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:8}00
 
-  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
-  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
+  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "an_R19B07.*00_an.${AN_MEMBER}" | sort | tail -n 1)
+  AN_INC_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "an_R19B07.*00_inc.${AN_MEMBER}" | sort | tail -n 1)
+  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "fc_R19B07.*5500.${AN_MEMBER}" | sort | tail -n 1)
 
   AN_FILE=$(basename "${AN_SOURCE}")
+  AN_INC_FILE=$(basename "${AN_INC_SOURCE}")
   FG_FILE=$(basename "${FG_SOURCE}")
 
   # Find files
@@ -41,6 +43,12 @@ if [ "${DATA_IS_LOCAL}" == "True" ]; then
     exit 1
   fi
 
+  if [ ! -f "${AN_INC_SOURCE}" ]; then
+    echo "Analysis increment file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+
   if [ ! -f "${FG_SOURCE}" ]; then
     echo "FG file for date ${STARTDATE} not found!"
     exit 1
@@ -54,17 +62,21 @@ if [ "${DATA_IS_LOCAL}" == "True" ]; then
 
     # Save filenames to be used later by other scripts.
     echo "${AN_FILE}" > an_file.txt
+    echo "${AN_INC_FILE}" > an_inc_file.txt
     echo "${FG_FILE}" > fg_file.txt
     rsync -v an_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/an_file.txt"
+    rsync -v an_inc_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/an_inc_file.txt"
     rsync -v fg_file.txt "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/fg_file.txt"
 
     # Remove temporary files.
     rm an_file.txt
+    rm an_inc_file.txt
     rm fg_file.txt
 
     # Copy the first-guess and analysis files.
     rsync -v "${FG_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${FG_FILE}"
     rsync -v "${AN_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${AN_FILE}"
+    rsync -v "${AN_INC_SOURCE}" "${HPCUSER}@${HPCHOST}":"${COMMON_DATE_FOLDER}/${AN_INC_FILE}"
 
     # Change permissions to read only.
     ssh "${HPCUSER}@${HPCHOST}" chmod 440 "${COMMON_DATE_FOLDER}/*"
@@ -75,13 +87,15 @@ if [ "${DATA_IS_LOCAL}" == "True" ]; then
 
     # Save filenames to be used later by other scripts.
     echo "${AN_FILE}" > an_file.txt
+    echo "${AN_INC_FILE}" > an_inc_file.txt
     echo "${FG_FILE}" > fg_file.txt
 
     # Copy the first-guess and analysis files.
     cp "${FG_SOURCE}" "${FG_FILE}"
     cp "${AN_SOURCE}" "${AN_FILE}"
+    cp "${AN_INC_SOURCE}" "${AN_INC_FILE}"
 
     # Change permissions to read only.
     chmod 440 ./*
   fi
-fi
\ No newline at end of file
+fi
diff --git a/templates/real-from-d2-ana/prepare_date_remote.sh b/templates/real-from-d2-ana/prepare_date_remote.sh
index 29ad0aa4af8b3068b23fa4059741bec9c50ca300..cb3e59e7f016548d9a409cedf0a3750bbc12bac2 100644
--- a/templates/real-from-d2-ana/prepare_date_remote.sh
+++ b/templates/real-from-d2-ana/prepare_date_remote.sh
@@ -2,10 +2,10 @@
 
 # This script is executed on the remote system at which the simulation will happen
 # and will be used if the initial conditions are in this same remote system.
-DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
+IC_DATA_IS_LOCAL=%SIMULATION.INITIAL_CONDITIONS.LOCAL%
 
 
-if [ "${DATA_IS_LOCAL}" != "True" ]; then
+if [ "${IC_DATA_IS_LOCAL}" != "True" ]; then
   # Get some variables provided by autosubmit.
   WORKDIR=%HPCROOTDIR%
   STARTDATE=%SDATE%
@@ -16,12 +16,14 @@ if [ "${DATA_IS_LOCAL}" != "True" ]; then
 
   AN_MEMBER=$(printf "%03d" %SIMULATION.INITIAL_CONDITIONS.MEMBER%)
   INITIAL_CONDITIONS_PARENT_FOLDER=%SIMULATION.INITIAL_CONDITIONS.PARENT_FOLDER%
-  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:6}/${STARTDATE:0:8}T00
+  INITIAL_CONDITIONS_PATH=${INITIAL_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:8}00
 
-  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igaf*00.m${AN_MEMBER}.grb" | sort | tail -n 1)
-  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "igfff00030000.m${AN_MEMBER}.grb" | sort | tail -n 1)
+  AN_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "an_R19B07.*00_an.${AN_MEMBER}" | sort | tail -n 1)
+  AN_INC_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "an_R19B07.*00_inc.${AN_MEMBER}" | sort | tail -n 1)
+  FG_SOURCE=$(find ${INITIAL_CONDITIONS_PATH} -name "fc_R19B07.*5500.${AN_MEMBER}" | sort | tail -n 1)
 
   AN_FILE=$(basename "${AN_SOURCE}")
+  AN_INC_FILE=$(basename "${AN_INC_SOURCE}")
   FG_FILE=$(basename "${FG_SOURCE}")
 
   # Find files
@@ -30,6 +32,12 @@ if [ "${DATA_IS_LOCAL}" != "True" ]; then
     exit 1
   fi
 
+  if [ ! -f "${AN_INC_SOURCE}" ]; then
+    echo "Analysis increment file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+
   if [ ! -f "${FG_SOURCE}" ]; then
     echo "FG file for date ${STARTDATE} not found!"
     exit 1
@@ -43,11 +51,13 @@ if [ "${DATA_IS_LOCAL}" != "True" ]; then
 
   # Save filenames to be used later by other scripts.
   echo "${AN_FILE}" > an_file.txt
+  echo "${AN_INC_FILE}" > an_inc_file.txt
   echo "${FG_FILE}" > fg_file.txt
 
   # Copy the first-guess and analysis files.
   cp "${FG_SOURCE}" "${FG_FILE}"
   cp "${AN_SOURCE}" "${AN_FILE}"
+  cp "${AN_INC_SOURCE}" "${AN_INC_FILE}"
 
   # Change permissions to read only.
   chmod 440 ./*
diff --git a/templates/real-from-d2-ana/prepare_experiment.sh b/templates/real-from-d2-ana/prepare_experiment.sh
index f4f299f9966428d4d4c3113952344c93d4a805d6..298810a207bf8627b107fdbcccb12e3b79cee8bb 100644
--- a/templates/real-from-d2-ana/prepare_experiment.sh
+++ b/templates/real-from-d2-ana/prepare_experiment.sh
@@ -5,6 +5,7 @@ WORKDIR=%HPCROOTDIR%
 DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
 RADIATION_GRID_FILE=%simulation.radiation_grid_filename%
 BOUNDARY_GRID_FILE=%simulation.lateral_boundary_grid_filename%
+PARENT_GRID_FILE=%simulation.parent_grid_filename%
 EXTERNAL_PARAMETERS_FILE=%simulation.external_parameters_filename%
 
 
@@ -35,6 +36,7 @@ BASEURL=http://icon-downloads.mpimet.mpg.de/grids/public/edzw
 download_file $BASEURL/${DYNAMICS_GRID_FILENAME}
 download_file $BASEURL/${RADIATION_GRID_FILE}
 download_file $BASEURL/${BOUNDARY_GRID_FILE}
+download_file $BASEURL/${PARENT_GRID_FILE}
 download_file $BASEURL/${EXTERNAL_PARAMETERS_FILE}
 
 # Link input for radiation
diff --git a/templates/real-from-d2-ana/prepare_lbc_remote.sh b/templates/real-from-d2-ana/prepare_lbc_remote.sh
new file mode 100644
index 0000000000000000000000000000000000000000..c714d61f56c6d7567343f7fcd5e76cc0211ff429
--- /dev/null
+++ b/templates/real-from-d2-ana/prepare_lbc_remote.sh
@@ -0,0 +1,98 @@
+#!/bin/bash -l
+
+# This script is executed on the remote system at which the simulation will happen
+# and will be used if the lateral boundary conditions are in this same remote system.
+
+# Because it can happen that the boundary conditions as well as the execution are on the local
+# system, we need to define these two variables:
+LBC_DATA_IS_LOCAL=%SIMULATION.BOUNDARY_CONDITIONS.LOCAL%
+
+if [ "x%HPCARCH%" == "xlocal" ]; then
+  RUN_MACHINE_IS_LOCAL="True"
+else
+  RUN_MACHINE_IS_LOCAL="False"
+fi
+
+if [ "${LBC_DATA_IS_LOCAL}" != "True" ]; then
+  # Get some variables provided by autosubmit.
+  WORKDIR=%HPCROOTDIR%
+  STARTDATE=%SDATE%
+  HPCUSER=%HPCUSER%
+  HPCHOST=%HPCHOST%
+
+  # Define date directory, create it and go there
+  COMMON_DATE_FOLDER=${WORKDIR}/${STARTDATE}/inidata
+
+  AN_MEMBER=$(printf "%03d" %SIMULATION.BOUNDARY_CONDITIONS.MEMBER%)
+  BOUNDARY_CONDITIONS_PARENT_FOLDER=%SIMULATION.BOUNDARY_CONDITIONS.PARENT_FOLDER%
+  BOUNDARY_CONDITIONS_PATH=${BOUNDARY_CONDITIONS_PARENT_FOLDER}/${STARTDATE:0:8}00
+
+  # Locate the tar.gz file of ICON-EU-EPS output needed for the lateral boundary conditions
+  FC_TAR_SOURCE=$(find ${BOUNDARY_CONDITIONS_PATH} -name "iefff.m${AN_MEMBER}.tar.gz" | sort | tail -n 1)
+
+  FC_TAR_FILE=$(basename "${FC_TAR_SOURCE}")
+
+
+  if [ ! -f "${FC_TAR_SOURCE}" ]; then
+    echo "ICON-EU FC file for date ${STARTDATE} not found!"
+    exit 1
+  fi
+
+  # Make a working directory for converting the boundary conditions and unpack the ICON-EU-EPS output there
+  mkdir -p work
+
+  tar -zxvf "${FC_TAR_SOURCE}" -C work
+
+  DYNAMICS_GRID_FILENAME=%simulation.dynamics_grid_filename%
+  BOUNDARY_GRID_FILE=%simulation.lateral_boundary_grid_filename%
+  PARENT_GRID_FILE=%simulation.parent_grid_filename%
+
+  # Loop over ICON-EU-EPS forecast hours 3 to 27, which provide the ICON-D2 boundary data for lead times 0 to 24
+  for tt in {3..27}; do
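+    # tt is the ICON-EU forecast hour; split it into days and hours for the ICON-EU file name (DDHH)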
+    th=$((tt % 24))
+    td=$((tt / 24))
+    EU_FC_TIME=$(printf "%02d" $td)$(printf "%02d" $th)
+
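+    # the first boundary file (lead time 0) corresponds to ICON-EU forecast hour 3, hence the 3 h offset (formatted as DDDHH)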
+    tt2=$((tt-3))
+    th=$((tt2 % 24))
+    td=$((tt2 / 24))
+    D2_FC_TIME=$(printf "%03d" $td)$(printf "%02d" $th)
+
+    # Convert ICON-EU-EPS to latbc
+    ${WORKDIR}/%python_environment.folder_name%/bin/python3 ${WORKDIR}/proj/templates/real-from-d2-ana/icon-remap-helper.py \
+        --src-grid ${COMMON_DATE_FOLDER}/${PARENT_GRID_FILE} \
+        --dst-grid ${COMMON_DATE_FOLDER}/${BOUNDARY_GRID_FILE} \
+        --source   work/iefff${EU_FC_TIME}0000.m${AN_MEMBER} \
+        --dest ${COMMON_DATE_FOLDER}/latbc_${D2_FC_TIME}00.m${AN_MEMBER} \
+        --output-format grb
+  done
+
+  # Collect the boundary-condition output generated above
+  LBC_SOURCE=$(find ${COMMON_DATE_FOLDER} -name "latbc_*00.m${AN_MEMBER}" | sort )
+
+  LBC_FILE=$(basename "${LBC_SOURCE}")
+
+
+  if [ ! -f "${LBC_SOURCE}" ]; then
+    echo "Failed to make boundary conditions file for date ${STARTDATE}!"
+    exit 1
+  fi
+
+  # Make sure the common date folder exists and go there
+  mkdir -p ${COMMON_DATE_FOLDER}
+  cd ${COMMON_DATE_FOLDER} || exit
+
+  # Save filenames to be used later by other scripts.
+  echo "${LBC_SOURCE}" > lbc_file.txt
+
+  # No copy needed here: the remapping step already wrote the boundary files below this folder.
+  #cp "${LBC_SOURCE}" "${LBC_FILE}"
+
+  # Change permissions to read only.
+  chmod 440 ./*
+
+  # clear the working directory
+  rm -r work
+
+fi
\ No newline at end of file