From 86fa256feeba8ff0f687ead1863c0b062329a065 Mon Sep 17 00:00:00 2001
From: Lorenzo Pagliai <lorenzo.pagliai@seco.com>
Date: Fri, 11 Nov 2022 08:59:05 +0100
Subject: [PATCH] [CICD] Updated with latest changes from SECO-NE

---
 .gitignore                                    |   2 +-
 .gitlab-ci.yml                                | 178 ++++----
 build-common.yml                              | 116 +++++
 build-jobs-ci-test.jinja2                     | 207 ---------
 build-jobs-ci-test.yml.jinja2                 | 172 +++++++
 ...octo.jinja2 => build-jobs-yocto.yml.jinja2 |  87 ++--
 build-yocto.yml                               | 145 ++++++
 common.yml                                    |  12 +-
 docs/automatic-manifest-integration.md        |   1 -
 docs/manifest-pipeline.md                     |   4 +-
 manifest-build.yml                            | 268 -----------
 manifest-integration-ci-test.yml              |  15 -
 manifest-integration-jobs.yml.jinja2          |  88 ++++
 manifest-integration-pipelines.yml.jinja2     |  84 ++++
 manifest-integration-yocto.yml                |  15 -
 manifest-integration.yml                      | 214 ++++-----
 manifest-package.yml                          |  48 +-
 manifest-pipeline-ci-test.yml                 |  12 +-
 manifest-pipeline-yocto.yml                   |  29 +-
 manifest-pipeline.yml                         | 100 ++--
 scripts/alphaplan_fwr.py                      |  20 +-
 scripts/changelog_generator.py                |   2 +-
 ...eck_if_integration_branch_is_up_to_date.py | 126 +++--
 scripts/colors.py                             |  47 ++
 scripts/common.py                             |  36 +-
 scripts/convert_md2html.py                    |   2 +-
 scripts/deploy_gitlab_ci.py                   | 409 +++++++++--------
 scripts/download_file_from_latest_job.py      | 107 +++++
 scripts/download_job_artifacts.py             | 164 +++++++
 scripts/generate_release_metadata.py          |  38 +-
 scripts/get_integration_sources.py            | 107 +++++
 scripts/get_manifest_projects.py              |  67 ---
 scripts/get_pipeline_jobs.py                  | 131 ++++++
 scripts/integrate_into_manifest.py            |  25 +-
 scripts/lava_api.py                           | 430 ++++++++++++++++++
 scripts/lava_create_testreport.py             | 326 +++++++++++++
 scripts/lava_credentials.py                   |  93 ++++
 scripts/markdown_generator.py                 | 316 +++++++++++++
 scripts/merge_into_manifest.py                |  38 +-
 scripts/package_release.py                    | 240 +++++-----
 scripts/retrigger_integrating_projects.py     | 112 +++++
 scripts/retrigger_mr_pipeline_job.py          |  89 ----
 scripts/retrigger_mr_pipeline_jobs.py         |  93 ----
 scripts/retrigger_pipeline_jobs.py            | 126 +++++
 scripts/submit_test.py                        |  18 +-
 scripts/update_submodule.py                   |  22 +-
 46 files changed, 3413 insertions(+), 1568 deletions(-)
 create mode 100644 build-common.yml
 delete mode 100644 build-jobs-ci-test.jinja2
 create mode 100644 build-jobs-ci-test.yml.jinja2
 rename build-jobs-yocto.jinja2 => build-jobs-yocto.yml.jinja2 (67%)
 create mode 100644 build-yocto.yml
 delete mode 100644 manifest-build.yml
 delete mode 100644 manifest-integration-ci-test.yml
 create mode 100644 manifest-integration-jobs.yml.jinja2
 create mode 100644 manifest-integration-pipelines.yml.jinja2
 delete mode 100644 manifest-integration-yocto.yml
 create mode 100755 scripts/colors.py
 create mode 100755 scripts/download_file_from_latest_job.py
 create mode 100755 scripts/download_job_artifacts.py
 create mode 100755 scripts/get_integration_sources.py
 delete mode 100755 scripts/get_manifest_projects.py
 create mode 100755 scripts/get_pipeline_jobs.py
 create mode 100755 scripts/lava_api.py
 create mode 100755 scripts/lava_create_testreport.py
 create mode 100755 scripts/lava_credentials.py
 create mode 100755 scripts/markdown_generator.py
 create mode 100755 scripts/retrigger_integrating_projects.py
 delete mode 100755 scripts/retrigger_mr_pipeline_job.py
 delete mode 100755 scripts/retrigger_mr_pipeline_jobs.py
 create mode 100755 scripts/retrigger_pipeline_jobs.py

diff --git a/.gitignore b/.gitignore
index 50ec84c..6467d59 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,2 @@
-/__pycache__
+scripts/__pycache__
 .idea/
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d2b7d36..5334749 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,12 +1,12 @@
+---
 # ---------------------------------------------------------------------------------------
 # Global
 # ---------------------------------------------------------------------------------------
----
 
 variables:
   # CI_IMAGES_BASEPATH: Environment variable configured in GitLab
   CI_IMAGES_PATH: ${CI_IMAGES_BASEPATH}/ci-images
-  CI_IMAGES_REVISION: 44965ccdd847f1e077670f49d546047f8ad0110c
+  CI_IMAGES_REVISION: 5dfddb02d67bbd16beb09ff4e31afcd5380f5788
   CI_IMAGE_PYTHON: "${CI_IMAGES_PATH}/python/3.9:${CI_IMAGES_REVISION}"
   CI_IMAGE_YOCTO: "${CI_IMAGES_PATH}/yocto-build/ubuntu-20.04:${CI_IMAGES_REVISION}"
 
@@ -65,45 +65,6 @@ yamllint:
 # ---------------------------------------------------------------------------------------
 # Stage: integrate
 # ---------------------------------------------------------------------------------------
-.ci-test-projects:
-  variables:
-    PROJECT_ROOT:
-      ${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/ci-test
-    MANIFEST_PROJECT:
-      ${PROJECT_ROOT}/minimal-manifest
-
-    INTEGRATE_INTO:
-      ${PROJECT_ROOT}/minimal-foo
-      ${PROJECT_ROOT}/minimal-bar
-      ${PROJECT_ROOT}/minimal-srcrev
-
-.yocto-projects:
-  variables:
-    PROJECT_ROOT:
-      ${CI_PROJECT_ROOT_NAMESPACE}
-    MANIFEST_PROJECT:
-      ${PROJECT_ROOT}/yocto/manifest
-    INTEGRATE_INTO:
-      ${PROJECT_ROOT}/3rd-party/kuk/uboot-imx-kuk
-      ${PROJECT_ROOT}/kernel/linux-guf
-      ${PROJECT_ROOT}/kernel/linux-imx-kuk
-      ${PROJECT_ROOT}/kernel/modules/egalaxi2c
-      ${PROJECT_ROOT}/kernel/modules/gfplatdetect
-      ${PROJECT_ROOT}/tools/gf-emc-test-suite
-      ${PROJECT_ROOT}/tools/gf-productiontests
-      ${PROJECT_ROOT}/tools/gfeeprom
-      ${PROJECT_ROOT}/tools/gfxml2dto
-      ${PROJECT_ROOT}/tools/guf-show-demo
-      ${PROJECT_ROOT}/tools/libmdb
-      ${PROJECT_ROOT}/tools/touchcal-conv
-      ${PROJECT_ROOT}/tools/xconfig
-      ${PROJECT_ROOT}/tools/qt-multi-screen-compositor
-      ${PROJECT_ROOT}/yocto/config
-      ${PROJECT_ROOT}/yocto/custom/dual-espresso/meta-seconorth-dual-espresso
-      ${PROJECT_ROOT}/yocto/layers/meta-seconorth-distro
-      ${PROJECT_ROOT}/yocto/layers/meta-seconorth-machine
-      ${PROJECT_ROOT}/yocto/layers/meta-seconorth-nogplv3
-
 .integrate:
   stage: integrate
   rules:
@@ -118,91 +79,124 @@ yamllint:
       --gitlab-url=${CI_SERVER_URL}
       --token=${GITBOT_TOKEN}
       --manifest-project=${MANIFEST_PROJECT}
+      --manifest-branch=${MANIFEST_BRANCH}
       --submodule=.gitlab-ci
       --revision=${CI_COMMIT_SHA}
-      --verbose
+      --group=${PROJECT_GROUP}
       ${MERGE}
-      ${INTEGRATE_INTO}
 
-integrate-yocto:
-  extends:
-    - .integrate
-    - .yocto-projects
+# Running multiple integration jobs for the same manifest in parallel can lead to race
+# conditions if there are projects integrating a single branch into different manifest
+# branches. Use resource groups to force execution one by one.
+.integrate-ci-test:
+  extends: .integrate
+  resource_group: integrate-ci-test
+  variables:
+    PROJECT_GROUP: ${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/ci-test
+    MANIFEST_PROJECT: ${PROJECT_GROUP}/minimal-manifest
+
+.integrate-yocto:
+  extends: .integrate
+  resource_group: integrate-yocto
+  variables:
+    PROJECT_GROUP: ${CI_PROJECT_ROOT_NAMESPACE}
+    MANIFEST_PROJECT: ${PROJECT_GROUP}/yocto/manifest
+
+# Jobs
+
+integrate-ci-test:primary:
+  extends: .integrate-ci-test
+  variables:
+    MANIFEST_BRANCH: primary
+
+integrate-ci-test:secondary:
+  extends: .integrate-ci-test
+  variables:
+    MANIFEST_BRANCH: secondary
+
+integrate-yocto:dunfell:
+  extends: .integrate-yocto
+  variables:
+    MANIFEST_BRANCH: dunfell
+
+integrate-yocto:kirkstone:
+  extends: .integrate-yocto
+  variables:
+    MANIFEST_BRANCH: kirkstone
 
-integrate-ci-test:
-  extends:
-    - .integrate
-    - .ci-test-projects
 
 # --------------------------------------------------------------------------------------
 # Stage: build
 # --------------------------------------------------------------------------------------
-build-yocto:
+.build:
   stage: build
-  needs: [integrate-yocto]
   rules:
     - if: $CI_MERGE_REQUEST_IID
       allow_failure: true
+
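+# The trigger branch name encodes the source project, its branch, and the target
+# manifest branch, e.g. (hypothetical): integrate/gitlab-ci/my-feature/into/primary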
+.build-ci-test:
+  extends: .build
   trigger:
-    project: seco-ne/yocto/manifest
-    branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}"
+    project: seco-ne/yocto/infrastructure/ci-test/minimal-manifest
+    branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}/into/${MANIFEST_BRANCH}"
     strategy: depend
 
-build-ci-test:
-  stage: build
-  needs: [integrate-ci-test]
-  rules:
-    - if: $CI_MERGE_REQUEST_IID
-      allow_failure: true
+.build-yocto:
+  extends: .build
   trigger:
-    project: seco-ne/yocto/infrastructure/ci-test/minimal-manifest
-    branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}"
+    project: seco-ne/yocto/manifest
+    branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}/into/${MANIFEST_BRANCH}"
     strategy: depend
 
+# Jobs
+
+build-ci-test:primary:
+  extends: .build-ci-test
+  needs: ["integrate-ci-test:primary"]
+  variables:
+    MANIFEST_BRANCH: primary
+
+build-ci-test:secondary:
+  extends: .build-ci-test
+  needs: ["integrate-ci-test:secondary"]
+  variables:
+    MANIFEST_BRANCH: secondary
+
+build-yocto:dunfell:
+  extends: .build-yocto
+  needs: ["integrate-yocto:dunfell"]
+  variables:
+    MANIFEST_BRANCH: dunfell
+
+build-yocto:kirkstone:
+  extends: .build-yocto
+  needs: ["integrate-yocto:kirkstone"]
+  variables:
+    MANIFEST_BRANCH: kirkstone
+
+
 # --------------------------------------------------------------------------------------
 # Stage: merge
 # --------------------------------------------------------------------------------------
 .merge:
-  extends: .integrate
   stage: merge
   rules:
     - if: $CI_COMMIT_BRANCH == "master"
       when: manual
       allow_failure: true
   variables:
-    MERGE: --merge
+    MERGE: --merge --project=${CI_PROJECT_PATH} --branch=${CI_COMMIT_REF_NAME}
 
 merge-ci-test:
   extends:
+    - .integrate-ci-test
     - .merge
-    - .ci-test-projects
+  variables:
+    MANIFEST_BRANCH: primary,secondary
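+    # MANIFEST_BRANCH takes a comma-separated list here; the integration script is
+    # expected to perform the merge once per listed manifest branch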
 
 merge-yocto:
   extends:
+    - .integrate-yocto
     - .merge
-    - .yocto-projects
-
-# --------------------------------------------------------------------------------------
-# Stage: build
-# --------------------------------------------------------------------------------------
-build-master-yocto:
-  stage: build
-  needs: [merge-yocto]
-  rules:
-    - if: $CI_COMMIT_BRANCH == "master"
-      when: manual
-  trigger:
-    project: seco-ne/yocto/manifest
-    branch: "dunfell"
-    strategy: depend
-
-build-master-ci-test:
-  stage: build
-  needs: [merge-ci-test]
-  rules:
-    - if: $CI_COMMIT_BRANCH == "master"
-      when: manual
-  trigger:
-    project: seco-ne/yocto/infrastructure/ci-test/minimal-manifest
-    branch: "master"
-    strategy: depend
+  variables:
+    MANIFEST_BRANCH: dunfell,kirkstone
diff --git a/build-common.yml b/build-common.yml
new file mode 100644
index 0000000..f926abf
--- /dev/null
+++ b/build-common.yml
@@ -0,0 +1,116 @@
+---
+workflow:
+  rules:
+    # This rule is needed, as otherwise the workflow:rules from
+    # the parent job seem to be used and prevent the pipeline generation
+    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
+
+.docker_check:
+  # Check that the build folder is empty. Sometimes the docker volume for the build is
+  # not removed afterwards (e.g. in case of a timeout); a follow-up build might then fail.
+  - |-
+    if [ "$(ls -A)" ]; then
+        echo "ERROR: Build folder is not empty. This might be the case because the" \
+             "docker volume has not been not removed in a previous job. Please check" \
+             "the docker container \"$CI_JOB_IMAGE\" on the GitLab runner" \
+             "\"$CI_RUNNER_SHORT_TOKEN\" ($CI_RUNNER_DESCRIPTION) and remove any" \
+             "attached volumes from it."
+        exit 1
+    fi
+
+.setup_ssh:
+  # Setup ssh key to access private repos
+  # https://docs.gitlab.com/ee/ci/ssh_keys/#ssh-keys-when-using-the-docker-executor
+  # An SSH keypair has been generated for the manifest's pipeline to be able to access
+  # all private repositories in the manifest. The private key has been stored in the
+  # CI/CD variable GITLAB_PRIVATE_KEY of the manifest repository. The public key has
+  # been added as a deploy key in GitLab's repository settings for all contained
+  # repositories.
+  - echo "${LOGPREFIX} Setup SSH"
+  - eval $(ssh-agent -s)
+  - echo "$GITLAB_PRIVATE_KEY" | tr -d '\r' | ssh-add -
+  - mkdir -p ~/.ssh
+  - chmod 700 ~/.ssh
+  # Add GitLab server to known hosts
+  # https://docs.gitlab.com/ee/ci/ssh_keys/#verifying-the-ssh-host-keys
+  # In order to access the private repositories via SSH, the GitLab server has to be
+  # added to the known_hosts file. The host keys were determined using the command
+  #   ssh-keyscan [-p port] <gitlab-server>
+  # and have been stored in the GITLAB_KNOWN_HOSTS CI/CD variable.
+  - echo "$GITLAB_KNOWN_HOSTS" >> ~/.ssh/known_hosts
+  - chmod 644 ~/.ssh/known_hosts
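+  # (For reference: such a keypair can be created with e.g. "ssh-keygen -t ed25519",
+  # and the host keys collected with "ssh-keyscan" as described above.)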
+
+.repo_checkout: &repo_checkout
+  - echo "${LOGPREFIX} Perform repo checkout"
+  - cd ${CI_PROJECT_DIR}
+  - repo init --submodules -u ${CI_REPOSITORY_URL}
+      -b refs/pipelines/${CI_PIPELINE_ID}
+  - repo sync --detach --current-branch --force-remove-dirty
+      --optimized-fetch --force-sync
+
+.buildbase:
+  tags:
+    - builds
+  timeout: 8h
+  interruptible: true
+  image:
+    name: "${CI_IMAGE_YOCTO}"
+    # Override entrypoint so we can pass --id to set the UID and GID for the
+    # user that is created in the container. This is a feature of the
+    # crops/poky images. See poky-entry.py for details.
+    entrypoint:
+      - "/usr/bin/distro-entry.sh"
+      - "/usr/bin/dumb-init"
+      - "--"
+      - "/usr/bin/poky-entry.py"
+      - "--id=118:998"
+  artifacts:
+    expire_in: 4 weeks
+
+.test:
+  extends:
+    - .infrastructure
+  timeout: 1h
+  rules:
+    - when: manual
+      allow_failure: true
+  dependencies: []
+  variables:
+    # TODO checkout only gitlab-ci repo to allow running on deleted branches
+    # Include git submodules
+    GIT_SUBMODULE_STRATEGY: recursive
+    CI_PARAM_TEST_SUITE: '{platform}.jinja2'
+    CI_PARAM_EXTRA: --nop
+    GITLAB_SERVER: "${CI_SERVER_HOST}:${CI_SERVER_SSH_PORT}"
+    GIT_BASE_URL: "ssh://git@${GITLAB_SERVER}/${CI_PROJECT_ROOT_NAMESPACE}"
+    TESTS_GIT_URL: "${GIT_BASE_URL}/yocto/tests.git"
+    TEST_REPO_BRANCH: ${MASTER_BRANCH}
+  artifacts:
+    when: always
+    paths:
+      - "results/**"
+    reports:
+      junit: results/results-*.xml
+  before_script:
+    - !reference [.setup_ssh]
+  script:
+    - .gitlab-ci/scripts/download_file_from_latest_job.py
+        --project $CI_PROJECT_ID
+        --pipeline $CI_PIPELINE_ID
+        --token=$GITBOT_TOKEN
+        --name $CI_PARAM_BUILDJOB
+        --path build.env
+    - cat build.env
+    - source build.env
+    # Submit tests to lava server
+    - .gitlab-ci/scripts/submit_test.py
+        --fng-install "$FNG_INSTALL_URL"
+        --name
+        "Gitlab $BUILD_MACHINE $BUILD_DISTRO $BUILD_IMAGE ($CI_PIPELINE_ID)"
+        --results-path "results"
+        --report-name "testresults-${CI_JOB_NAME}.md"
+        --test-repo $TESTS_GIT_URL
+        --test-repo-branch $TEST_REPO_BRANCH
+        --test-plan $CI_PARAM_TEST_SUITE
+        $CI_PARAM_EXTRA
+        $CI_PARAM_PLATFORMS
diff --git a/build-jobs-ci-test.jinja2 b/build-jobs-ci-test.jinja2
deleted file mode 100644
index 4aebd92..0000000
--- a/build-jobs-ci-test.jinja2
+++ /dev/null
@@ -1,207 +0,0 @@
----
-# --------------------------------------------------------------------------------------
-# Global
-# --------------------------------------------------------------------------------------
-# As the trigger job is not executed in a environment with checked out repository, we
-# need to get the includes directly from gitlab
-include:
-  - project: '{{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci'
-    ref: {{ GITLAB_CI_REVISION }}
-    file: 'common.yml'
-
-workflow:
-  rules:
-    # This rule is needed, as otherwise the workflow:rules from
-    # the parent job seem to be used and prevent the pipeline generation
-    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
-
-stages:
-  - infrastructure
-  - build
-  - test
-
-# --------------------------------------------------------------------------------------
-# Stage: infrastructure
-# --------------------------------------------------------------------------------------
-yamllint:
-  extends: .yamllint
-
-# --------------------------------------------------------------------------------------
-# Stage: build
-# --------------------------------------------------------------------------------------
-.setup_ssh: &setup_ssh
-  # Setup ssh key to access private repos
-  # https://docs.gitlab.com/ee/ci/ssh_keys/#ssh-keys-when-using-the-docker-executor
-  # An SSH keypair has been generated for the manifest's pipeline to be able to access
-  # all private repositories in the manifest. The private key has been stored in the
-  # CI/CD variable GITLAB_PRIVATE_KEY of the manifest repository. The public key has
-  # been added as a deploy key in GitLab's repository settings for all contained
-  # repositories.
-  - eval $(ssh-agent -s)
-  - echo "$GITLAB_PRIVATE_KEY" | tr -d '\r' | ssh-add -
-  - mkdir -p ~/.ssh
-  - chmod 700 ~/.ssh
-  # Add GitLab server to known hosts
-  # https://docs.gitlab.com/ee/ci/ssh_keys/#verifying-the-ssh-host-keys
-  # In order to access the private repositories via SSH, the GitLab server has to be
-  # added to the known_hosts file. The host keys were determined using the command
-  #   ssh-keyscan [-p port] <gitlab-server>
-  # and have been stored in the GITLAB_KNOWN_HOSTS CI/CD variable.
-  - echo "$GITLAB_KNOWN_HOSTS" >> ~/.ssh/known_hosts
-  - chmod 644 ~/.ssh/known_hosts
-
-.repo_checkout: &repo_checkout
-  # setup build dir
-  - cd ${CI_PROJECT_DIR}
-  - repo init -u ${CI_REPOSITORY_URL} -b refs/pipelines/${CI_PIPELINE_ID}
-  - repo sync --detach --current-branch --no-tags --force-remove-dirty
-              --optimized-fetch --force-sync
-
-.build: &build
-  - cd ${CI_PROJECT_DIR}
-  - VERSION=$(cd .repo/manifests && git describe --tags)
-  - cat .repo/manifests/default.xml
-  - ls * > files-$VERSION.txt
-  - ls *
-  - FOO_FILES=$(cd foo && ls | wc -l)
-  - BAR_FILES=$(cd bar && ls | wc -l)
-  - DIFF=$((BAR_FILES-FOO_FILES))
-  - (($DIFF >= -1 && $DIFF <= 1))
-
-build:files:
-  stage: build
-  needs: []
-  tags:
-    - infrastructure
-  timeout: 2m
-  image:
-    name: "${CI_IMAGE_YOCTO}"
-    # Override entrypoint so we can pass --id to set the UID and GID for the user that
-    # is created in the container. This is a feature of the crops/poky images.
-    # See poky-entry.py for details.
-    entrypoint:
-      - "/usr/bin/distro-entry.sh"
-      - "/usr/bin/dumb-init"
-      - "--"
-      - "/usr/bin/poky-entry.py"
-      - "--id=118:998"
-  variables:
-    GIT_STRATEGY: none
-  before_script:
-    - *setup_ssh
-  script:
-    - *repo_checkout
-    - *build
-  artifacts:
-    expire_in: 7d
-    paths:
-      - files-*
-
-build:echo:
-  stage: build
-  needs: []
-  tags:
-    - infrastructure
-  timeout: 2m
-  image: ${CI_IMAGE_PYTHON}
-  script:
-    - printenv
-    - echo "Build successful"
-
-# --------------------------------------------------------------------------------------
-# Stage: test
-# --------------------------------------------------------------------------------------
-.test-simple:
-  stage: test
-  rules:
-    - when: manual
-      allow_failure: true
-  tags:
-    - infrastructure
-  image: ${CI_IMAGE_PYTHON}
-  script:
-    - exit ${RETURNCODE}
-
-test:pass:
-  extends:
-    - .test-simple
-  variables:
-    RETURNCODE: 0
-
-test:fail:
-  extends:
-    - .test-simple
-  variables:
-    RETURNCODE: 1
-
-# ---------------------------------------------------------------------------------------
-# Stage: test
-# ---------------------------------------------------------------------------------------
-.test:
-  extends:
-    - .infrastructure
-  timeout: 1h
-  rules:
-    - when: manual
-      allow_failure: true
-  variables:
-    # Include git submodules
-    GIT_SUBMODULE_STRATEGY: recursive
-    CI_PARAM_TEST_SUITE: '{platform}.jinja2'
-    CI_PARAM_EXTRA: --nop
-    LOGPREFIX: "CI:test:"
-  artifacts:
-    when: always
-    paths:
-      - "results/**"
-    reports:
-      junit: results/results-*.xml
-  before_script:
-    # Setup ssh key to access private repos
-    # https://docs.gitlab.com/ee/ci/ssh_keys/#ssh-keys-when-using-the-docker-executor
-    # An SSH keypair has been generated for the manifest's pipeline to be able to access
-    # all private repositories in the manifest. The private key has been stored in the
-    # CI/CD variable GITLAB_PRIVATE_KEY of the manifest repository. The public key has
-    # been added as a deploy key in GitLab's repository settings for all contained
-    # repositories.
-    - echo "${LOGPREFIX} Setup SSH"
-    - eval $(ssh-agent -s)
-    - echo "$GITLAB_PRIVATE_KEY" | tr -d '\r' | ssh-add -
-    - mkdir -p ~/.ssh
-    - chmod 700 ~/.ssh
-    # Add GitLab server to known hosts
-    # https://docs.gitlab.com/ee/ci/ssh_keys/#verifying-the-ssh-host-keys
-    # In order to access the private repositories via SSH, the GitLab server has to be
-    # added to the known_hosts file. The host keys were determined using the command
-    #   ssh-keyscan [-p port] <gitlab-server>
-    # and have been stored in the GITLAB_KNOWN_HOSTS CI/CD variable.
-    - echo "$GITLAB_KNOWN_HOSTS" >> ~/.ssh/known_hosts
-    - chmod 644 ~/.ssh/known_hosts
-
-  script:
-    - |-
-      # Submit tests to lava server
-      # Use existing release as we don't build in CI-test
-      DEPLOYPATH_TEST="http://srv73/SoftwareStore/Linux-Yocto/Releases/"
-      RELEASE="Yocto-dunfell-15.0/"
-      INSTALLSCRIPT_ABS="$DEPLOYPATH_TEST/$RELEASE/$CI_PARAM_MACHINE/fng-install.sh"
-      FNG_INSTALL_URL="${INSTALLSCRIPT_ABS#/*/}"
-      .gitlab-ci/scripts/submit_test.py \
-          --fng-install "${FNG_INSTALL_URL}" \
-          --name \
-          "Gitlab build test ${CI_PARAM_MACHINE} ${RELEASE} ${CI_PIPELINE_ID}" \
-          --results-path "results" \
-          --test-repo ${TESTS_GIT_URL} \
-          --test-repo-branch dunfell \
-          --test-plan ${CI_PARAM_TEST_SUITE} \
-          ${CI_PARAM_EXTRA} \
-          ${CI_PARAM_PLATFORMS}
-
-smoketest:imx6guf:
-  extends: .test
-  stage: test
-  variables:
-    CI_PARAM_MACHINE: imx6guf
-    CI_PARAM_PLATFORMS: imx6guf
-    CI_PARAM_TEST_SUITE: boot.jinja2
-    CI_PARAM_EXTRA: --all-devices
diff --git a/build-jobs-ci-test.yml.jinja2 b/build-jobs-ci-test.yml.jinja2
new file mode 100644
index 0000000..1eb2fb1
--- /dev/null
+++ b/build-jobs-ci-test.yml.jinja2
@@ -0,0 +1,172 @@
+---
+# --------------------------------------------------------------------------------------
+# Global
+# --------------------------------------------------------------------------------------
+# As the trigger job is not executed in an environment with a checked-out repository,
+# we need to get the includes directly from GitLab
+include:
+  - project: '{{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci'
+    ref: {{ GITLAB_CI_REVISION }}
+    file:
+      - build-common.yml
+      - common.yml
+
+stages:
+  - build
+  - test
+
+# --------------------------------------------------------------------------------------
+# Stage: build
+# --------------------------------------------------------------------------------------
+.build: &build
+  - cd ${CI_PROJECT_DIR}
+  - VERSION=$(cd .repo/manifests && git describe --tags)
+  - cat .repo/manifests/default.xml
+  - find foo -name "[0-9]*.txt" -printf '%P\n' | sort -V > files-foo-$VERSION.txt
+  - cat files-foo-$VERSION.txt
+  - find bar -name "[0-9]*.txt" -printf '%P\n' | sort -V > files-bar-$VERSION.txt
+  - cat files-bar-$VERSION.txt
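+  # The check below passes only if the number of generated files in foo and bar
+  # differs by at most one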
+  - FOO_FILES=$(cat files-foo-$VERSION.txt | wc -l)
+  - BAR_FILES=$(cat files-bar-$VERSION.txt | wc -l)
+  - DIFF=$((BAR_FILES-FOO_FILES))
+  - (($DIFF >= -1 && $DIFF <= 1))
+
+build:files:
+  stage: build
+  extends:
+    - .buildbase
+  needs: []
+  tags:
+    - infrastructure
+  timeout: 2m
+  variables:
+    GIT_STRATEGY: none
+    LOGPREFIX: "CI:build:"
+  before_script:
+    - !reference [.docker_check]
+    - !reference [.setup_ssh]
+    - !reference [.repo_checkout]
+  script:
+    - *build
+  artifacts:
+    paths:
+      - files-*
+
+build:echo:
+  stage: build
+  needs: []
+  tags:
+    - infrastructure
+  timeout: 2m
+  image: ${CI_IMAGE_PYTHON}
+  script:
+    - printenv
+    - echo "Build successful"
+
+build:check-foo-branch:
+  stage: build
+  extends:
+    - .buildbase
+  needs: []
+  tags:
+    - infrastructure
+  timeout: 2m
+  variables:
+    GIT_STRATEGY: none
+  before_script:
+    - !reference [.docker_check]
+    - !reference [.setup_ssh]
+    - !reference [.repo_checkout]
+  script: |
+    echo "repo branch: $MASTER_BRANCH"
+    echo "foo branch:  $(cat foo/branch.txt)"
+    if [[ "$MASTER_BRANCH" != $(cat foo/branch.txt) ]]; then
+      echo "ERROR: Branches do not match!"
+      exit 1
+    fi
+
+build-imx6guf-fake:
+  stage: build
+  needs: []
+  tags:
+    - infrastructure
+  timeout: 20m
+  image: ${CI_IMAGE_PYTHON}
+  rules:
+    - when: manual
+      allow_failure: true
+  variables:
+    MANIFEST_VERSION: Yocto-dunfell-17.0
+    CI_PARAM_MACHINE: imx6guf
+    CI_PARAM_DISTRO: guf-wayland
+    CI_PARAM_IMAGE: guf-image
+    BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
+    IMAGEBASEPATH: "tmp/deploy/images/"
+    IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
+    LICENSESPATH: "tmp/deploy/licenses"
+    SDKPATH: "tmp/deploy/sdk/"
+    INSTALLSCRIPT: "fng-install.sh"
+    JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/"
+    FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
+
+  script:
+    # TODO: this becomes similar to the yocto build and may be merged
+    - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env
+    - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env
+    - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env
+    - echo "MANIFEST_VERSION=$MANIFEST_VERSION" >> build.env
+    - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env
+    - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env
+    - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env
+    - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env
+    - source build.env
+
+    - echo "Getting yocto build from Z:"
+    - mkdir -p $BUILD_PATH_IMAGE
+    - cp -v /artifacts-yocto/Releases/$MANIFEST_VERSION/$CI_PARAM_MACHINE/*.* $BUILD_PATH_IMAGE
+    - ls $BUILD_PATH_IMAGE
+  artifacts:
+    reports:
+      dotenv: build.env
+    paths:
+      - build-guf-wayland-imx6guf/tmp/deploy/images/imx6guf/*
+      - build.env
+
+# --------------------------------------------------------------------------------------
+# Stage: test
+# --------------------------------------------------------------------------------------
+.test_simple:
+  stage: test
+  rules:
+    - when: manual
+      allow_failure: true
+  tags:
+    - infrastructure
+  image: ${CI_IMAGE_PYTHON}
+  script:
+    - exit ${RETURNCODE}
+
+test:pass:
+  extends:
+    - .test_simple
+  variables:
+    RETURNCODE: 0
+
+test:fail:
+  extends:
+    - .test_simple
+  variables:
+    RETURNCODE: 1
+
+smoketest-imx6guf:
+  extends:
+    - .test
+  stage: test
+  needs: [build-imx6guf-fake]
+  variables:
+    TEST_REPO_BRANCH: dunfell
+    CI_PARAM_PLATFORMS: imx6guf
+    CI_PARAM_EXTRA: --all-devices
+    CI_PARAM_TEST_SUITE: boot.jinja2
+    CI_PARAM_BUILDJOB: build-imx6guf-fake
+
diff --git a/build-jobs-yocto.jinja2 b/build-jobs-yocto.yml.jinja2
similarity index 67%
rename from build-jobs-yocto.jinja2
rename to build-jobs-yocto.yml.jinja2
index bd91a78..42295ef 100644
--- a/build-jobs-yocto.jinja2
+++ b/build-jobs-yocto.yml.jinja2
@@ -7,22 +7,11 @@
 include:
   - project: '{{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci'
     ref: {{ GITLAB_CI_REVISION }}
-    file: 'common.yml'
-  - project: '{{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci'
-    ref: {{ GITLAB_CI_REVISION }}
-    file: 'manifest-build.yml'
-  - project: '{{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci'
-    ref: {{ GITLAB_CI_REVISION }}
-    file: 'manifest-package.yml'
-
-variables:
-  MASTER_BRANCH_MANIFEST: {{ MASTER_BRANCH_MANIFEST }}
-
-workflow:
-  rules:
-    # This rule is needed, as otherwise the workflow:rules from
-    # the parent job seem to be used and prevent the pipeline generation
-    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
+    file:
+      - build-common.yml
+      - build-yocto.yml
+      - common.yml
+      - manifest-package.yml
 
 stages:
   - infrastructure
@@ -31,15 +20,17 @@ stages:
   - deploy
   - uploadftp
 
+variables:
+  MASTER_BRANCH: {{ MASTER_BRANCH }}
+
 # --------------------------------------------------------------------------------------
 # Stage: infrastructure
 # --------------------------------------------------------------------------------------
-
 changelog:
   extends: .infrastructure
   script: .gitlab-ci/scripts/changelog_generator.py
               --token=${GITBOT_TOKEN}
-              --branch=${MASTER_BRANCH_MANIFEST}
+              --branch=${MASTER_BRANCH}
               > changelog.md
   artifacts:
     expire_in: 4 weeks
@@ -49,7 +40,6 @@ changelog:
 # --------------------------------------------------------------------------------------
 # Generated build jobs
 # --------------------------------------------------------------------------------------
-
 {% if CI_PARAM_DISTRO is not defined %}
 {% set CI_PARAM_DISTRO = "guf-wayland" %}
 {% endif %}
@@ -71,6 +61,12 @@ build-{{ machine }}:
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO }}
     CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE }}
+    INSTALLSCRIPT: "fng-install.sh"
+  artifacts:
+    paths:
+      - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/images/{{ machine }}/*
+      - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest
+      - build.env
 
 # Build jobs for the sdk
 buildsdk-{{ machine }}:
@@ -80,6 +76,10 @@ buildsdk-{{ machine }}:
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO }}
     CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE }}
+  artifacts:
+    paths:
+      - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/sdk/*
+      - build.env
 
 # Deploy jobs for the yocto image
 deployimage-{{ machine }}:
@@ -87,47 +87,68 @@ deployimage-{{ machine }}:
   stage: deploy
   needs: [build-{{ machine }}, changelog]
 
+# Deploy jobs for the yocto sdk
+deploysdk-{{ machine }}:
+  extends: .deployimage
+  stage: deploy
+  needs: [buildsdk-{{ machine }}]
+
 # Upload ftp jobs for the yocto image
 uploadftp-{{ machine }}:
   extends:
     - .uploadftp
-    - .uploadsdkftp
   stage: uploadftp
-  needs: [build-{{ machine }}, buildsdk-{{ machine }}, changelog]
+  needs: [build-{{ machine }}, changelog]
+
+# Upload ftp jobs for the yocto sdk
+uploadftp-sdk-{{ machine }}:
+  extends:
+    - .uploadftp
+  stage: uploadftp
+  needs: [buildsdk-{{ machine }}]
+
 
 # Run platform tests for this machine with the yocto image
 # This is a little hacky as we need to match the machine name to
 # the available platforms
-{% if machine == 'imx6guf' %}
+{% if machine == 'seco-mx6' or machine == 'imx6guf' %}
 {% set platforms = "santaro santoka santino santino-lt" %}
-{% elif machine == 'imx6ullguf' %}
+{% set lavamachine = "imx6guf" %}
+{% elif machine == 'seco-mx6ull' or machine == 'imx6ullguf' %}
 {% set platforms = "nallino" %}
-{% elif machine == 'imx8mguf' %}
+{% set lavamachine = "imx6ullguf" %}
+{% elif machine == 'seco-mx8mm' or machine == 'imx8mguf' %}
 {% set platforms = "tanaro" %}
+{% set lavamachine = "imx8mguf" %}
 {% else %}
 {% set platforms = '' %}
+{% set lavamachine = '' %}
 {% endif %}
 
 {% if platforms %}
+# Run smoketests for this machine with the yocto image
 smoketest:{{ machine }}:
-  extends: .test
+  extends:
+    - .test
   stage: test
   needs:
     - job: build-{{ machine }}
   variables:
-    CI_PARAM_MACHINE: {{ machine }}
-    CI_PARAM_PLATFORMS: {{ machine }}
+    CI_PARAM_BUILDJOB: build-{{ machine }}
+    CI_PARAM_MACHINE: {{ lavamachine }}
+    CI_PARAM_PLATFORMS: {{ lavamachine }}
     CI_PARAM_TEST_SUITE: boot.jinja2
     CI_PARAM_EXTRA: --all-devices
 
 platformtest:{{ machine }}:
-  extends: .test
+  extends:
+    - .test
   stage: test
   needs:
     - job: build-{{ machine }}
   variables:
-    CI_PARAM_MACHINE: {{ machine }}
+    CI_PARAM_BUILDJOB: build-{{ machine }}
+    CI_PARAM_MACHINE: {{ lavamachine }}
     CI_PARAM_PLATFORMS: {{ platforms }}
 {% endif %}
 
@@ -137,12 +158,18 @@ platformtest:{{ machine }}:
 
 # Build jobs for the fng system image
 build-{{ machine }}-fngsystem:
-  extends: .buildfng
+  extends: .buildimage
   stage: build
   variables:
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO_FNG }}
     CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE_FNG }}
+    INSTALLSCRIPT: "fngsystem-self-update.sh"
+  artifacts:
+    paths:
+      - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/images/{{ machine }}/*
+      - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest
+      - build.env
 
 # Deploy jobs for the fngsystem image
 deployimage-{{ machine }}-fngsystem:
diff --git a/build-yocto.yml b/build-yocto.yml
new file mode 100644
index 0000000..43d55e5
--- /dev/null
+++ b/build-yocto.yml
@@ -0,0 +1,145 @@
+---
+
+.collect_srcrevs: &collect_srcrevs
+  # write all package AUTOREVS to file
+  - |-
+    SRCREVS_FILE="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/BUILD_SRCREVS.log"
+    if [ -d "$( dirname "${SRCREVS_FILE}" )" ];then
+        buildhistory-collect-srcrevs > ${SRCREVS_FILE}
+        echo "${LOGPREFIX} buildhistory-collect-srcrevs:"
+        cat ${SRCREVS_FILE}
+    fi
+
+.dump_install_command: &dump_install_command
+  # print install instructions
+  - |-
+    SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
+    if [ ! -f "${SCRIPT}" ]; then
+        echo "Install script missing, searched for '$SCRIPT'"
+        exit 1
+    fi
+
+    if [ "$CI_PROJECT_VISIBILITY" = "public" ];then
+    cat <<-EOF
+    ==============================
+        Install the image:
+
+    FNG="$FNG_INSTALL_URL"
+    curl --location "\$FNG" | sh -s -- --url="\$(dirname "\$FNG")"
+    ==============================
+    EOF
+    else
+    cat <<-EOF
+    ==============================
+        Install the image:
+
+    export GITLAB_TOKEN=<your_access_token>
+    FNG="$FNG_INSTALL_URL"
+    curl --location --header "PRIVATE-TOKEN: \$GITLAB_TOKEN" "\$FNG" \
+                              | sh -s -- --url="\$(dirname "\$FNG")"
+    ==============================
+    EOF
+    fi
+
+.build_script: &build_script
+  # setup build environment
+  - echo "${LOGPREFIX} Build configuration MACHINE=${CI_PARAM_MACHINE}
+      DISTRO=${CI_PARAM_DISTRO} IMAGE=${CI_PARAM_IMAGE}"
+  - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env
+  - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env
+  - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env
+  - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env
+  - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env
+  - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env
+  - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env
+  - source build.env
+  - echo "${LOGPREFIX} Using build dir ${BUILDPATH}"
+  - export MACHINE="${CI_PARAM_MACHINE}"
+  - export DISTRO="${CI_PARAM_DISTRO}"
+  - export EULA="1"
+  - source ./"${SETUPSCRIPT}" "${BUILDPATH}"
+  # start build
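+  # (the section_start/section_end markers wrap the bitbake output in a collapsible
+  # section of the GitLab job log)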
+  - echo -e "section_start:`date +%s`:bitbake_run\r\e[0KBitbake Log"
+  - echo "${LOGPREFIX} bitbake ${CI_PARAM_IMAGE} -c ${BITBAKE_TASK}"
+  - bitbake "${CI_PARAM_IMAGE}" -c "${BITBAKE_TASK}"
+  - echo -e "section_end:`date +%s`:bitbake_run\r\e[0K"
+
+.buildimage:
+  extends:
+    - .buildbase
+  needs: []
+  variables:
+    GIT_STRATEGY: none
+    SETUPSCRIPT: "setup-environment"
+    BITBAKE_TASK: "build"
+    LOGPREFIX: "CI:build:"
+    BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
+    IMAGEBASEPATH: "tmp/deploy/images/"
+    IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
+    LICENSESPATH: "tmp/deploy/licenses"
+    SDKPATH: "tmp/deploy/sdk/"
+    JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/"
+    FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
+  before_script:
+    - !reference [.docker_check]
+    - !reference [.setup_ssh]
+    - !reference [.repo_checkout]
+  script:
+    - *build_script
+    - *collect_srcrevs
+    - *dump_install_command
+  artifacts:
+    reports:
+      dotenv: build.env
+
+# --------------------------------------------------------------------------------------
+# Stage: build
+# --------------------------------------------------------------------------------------
+.buildsdk:
+  extends:
+    - .buildimage
+  stage: build
+  rules:
+    - when: manual
+      allow_failure: true
+  script:
+    - *build_script
+  variables:
+    BITBAKE_TASK: "populate_sdk"
+
+# --------------------------------------------------------------------------------------
+# Stage: deploy
+# --------------------------------------------------------------------------------------
+.deployimage:
+  extends:
+    - .infrastructure
+    - .package
+  rules:
+    - when: manual
+      allow_failure: true
+  artifacts:
+    paths:
+      - release/**/**/*
+    reports:
+      dotenv: package.env
+  timeout: 30m
+
+# --------------------------------------------------------------------------------------
+# Stage: uploadftp
+# --------------------------------------------------------------------------------------
+.uploadftp:
+  variables:
+    CI_PARAM_PACKAGE_FTP: "true"
+  extends:
+    - .infrastructure
+    - .package
+  rules:
+    # FIXME: This is a workaround to prevent customer releases being uploaded to our
+    # public FTP area. It should be removed as soon as we support uploading to different
+    # FTP target folders.
+    - if: $HIDE_FTP_UPLOAD_STAGE
+      when: never
+    - if: $CI_COMMIT_TAG
+      when: manual
+      allow_failure: true
+  timeout: 30m
diff --git a/common.yml b/common.yml
index 0037ebc..0d3f2fe 100644
--- a/common.yml
+++ b/common.yml
@@ -5,7 +5,7 @@
 variables:
   # CI_IMAGES_BASEPATH: Environment variable configured in GitLab
   CI_IMAGES_PATH: ${CI_IMAGES_BASEPATH}/ci-images
-  CI_IMAGES_REV: 44965ccdd847f1e077670f49d546047f8ad0110c
+  CI_IMAGES_REV: e6a404c3e1a919f54feb83f1bc061ce171220437
   CI_IMAGE_PYTHON: "${CI_IMAGES_PATH}/python/3.9:${CI_IMAGES_REV}"
   CI_IMAGE_YOCTO: "${CI_IMAGES_PATH}/yocto-build/ubuntu-20.04:${CI_IMAGES_REV}"
   # Include git submodules
@@ -14,12 +14,6 @@ variables:
   # may take too long
   GIT_DEPTH: 1
 
-  DEPLOYPATH_TEST: "/artifacts/${CI_JOB_ID}/"
-  GITLAB_SERVER: "${CI_SERVER_HOST}:${CI_SERVER_SSH_PORT}"
-  GIT_BASE_URL: "ssh://git@${GITLAB_SERVER}/${CI_PROJECT_ROOT_NAMESPACE}"
-  TESTS_GIT_URL: "${GIT_BASE_URL}/yocto/tests.git"
-
-
 # --------------------------------------------------------------------------------------
 # Common infrastructure settings
 # --------------------------------------------------------------------------------------
@@ -28,7 +22,7 @@ variables:
   tags:
     - infrastructure
   timeout: 10m
-  image: secodocker/edgehog-builder:latest #"${CI_IMAGE_PYTHON}"
+  image: "${CI_IMAGE_PYTHON}"
   needs: []
   variables:
     # Include git submodules
@@ -42,7 +36,7 @@ variables:
     - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
       when: never
     # Usually run this job only on non-master branches, i.e. in merge requests
-    - if: $CI_COMMIT_REF_NAME != $MASTER_BRANCH_MANIFEST
+    - if: $CI_COMMIT_REF_NAME != $MASTER_BRANCH
     # Additionally run this job if pipeline was triggered by the API, which happens e.g.
     # when no pipeline has run in a merge request, so a full build pipeline is needed on
     # the master
diff --git a/docs/automatic-manifest-integration.md b/docs/automatic-manifest-integration.md
index 038128a..c018c63 100644
--- a/docs/automatic-manifest-integration.md
+++ b/docs/automatic-manifest-integration.md
@@ -188,7 +188,6 @@ the git revision in the `SRCREV.conf` file on project changes.
 
 2. Add an appropriate `.gitlab-ci.yml` file. For example, copy it from the
    repository [egalxi2c][8]. Modify the following variables in the file:
-   * `MASTER_BRANCH_PROJECT`: Set the project master branch
    * `BB_RECIPE_NAME`: Set the name of the bitbake recipe
 
 3. Create a corresponding entry in the `SRCREV.conf` file of the manifest repo:\
diff --git a/docs/manifest-pipeline.md b/docs/manifest-pipeline.md
index d4f8ec8..27ceb66 100644
--- a/docs/manifest-pipeline.md
+++ b/docs/manifest-pipeline.md
@@ -45,7 +45,9 @@ dynamic-child-pipeline feature. [See gitlab docs.][1]
 
 There is a *'generate-build-jobs'* job that creates a YAML file containing the
 pipeline with all needed jobs.
-There are the following CI variables controlling the content:
+The following CI variables in the 'generate-build-jobs' job control the content
+(make sure these are not set in a more global scope, as that would override the
+settings in the generated YAML):
 
 * `CI_PARAM_MACHINES`: Space separated list of machines to build for, like "santaro santoka" 
 * `CI_PARAM_IMAGE`: The name of the image to build. If set to an empty string, 
diff --git a/manifest-build.yml b/manifest-build.yml
deleted file mode 100644
index 7037cc6..0000000
--- a/manifest-build.yml
+++ /dev/null
@@ -1,268 +0,0 @@
----
-variables:
-  BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
-  IMAGEBASEPATH: "tmp/deploy/images/"
-  IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
-  LICENSESPATH: "tmp/deploy/licenses"
-  SDKPATH: "tmp/deploy/sdk/"
-
-.docker_check: &docker_check
-  # Check if the build folder is empty. Sometimes the docker volume for the build is not
-  # removed afterwards (e.g. in case of a timeout), then a follow-up build might fail.
-  - |-
-    if [ "$(ls -A)" ]; then
-        echo "ERROR: Build folder is not empty. This might be the case because the" \
-             "docker volume has not been not removed in a previous job. Please check" \
-             "the docker container \"$CI_JOB_IMAGE\" on the GitLab runner" \
-             "\"$CI_RUNNER_SHORT_TOKEN\" ($CI_RUNNER_DESCRIPTION) and remove any" \
-             "attached volumes from it."
-        exit 1
-    fi
-
-.setup_ssh: &setup_ssh
-  # Setup ssh key to access private repos
-  # https://docs.gitlab.com/ee/ci/ssh_keys/#ssh-keys-when-using-the-docker-executor
-  # An SSH keypair has been generated for the manifest's pipeline to be able to access
-  # all private repositories in the manifest. The private key has been stored in the
-  # CI/CD variable GITLAB_PRIVATE_KEY of the manifest repository. The public key has
-  # been added as a deploy key in GitLab's repository settings for all contained
-  # repositories.
-  - echo "${LOGPREFIX} Setup SSH"
-  - eval $(ssh-agent -s)
-  - echo "$GITLAB_PRIVATE_KEY" | tr -d '\r' | ssh-add -
-  - mkdir -p ~/.ssh
-  - chmod 700 ~/.ssh
-  # Add GitLab server to known hosts
-  # https://docs.gitlab.com/ee/ci/ssh_keys/#verifying-the-ssh-host-keys
-  # In order to access the private repositories via SSH, the GitLab server has to be
-  # added to the known_hosts file. The host keys were determined using the command
-  #   ssh-keyscan [-p port] <gitlab-server>
-  # and have been stored in the GITLAB_KNOWN_HOSTS CI/CD variable.
-  - echo "$GITLAB_KNOWN_HOSTS" >> ~/.ssh/known_hosts
-  - chmod 644 ~/.ssh/known_hosts
-
-.repo_checkout: &repo_checkout
-  - echo "${LOGPREFIX} Perform repo checkout"
-  - cd ${CI_PROJECT_DIR}
-  - repo init --submodules -u ${CI_REPOSITORY_URL}
-      -b refs/pipelines/${CI_PIPELINE_ID}
-  - repo sync --detach --current-branch --no-tags --force-remove-dirty
-      --optimized-fetch --force-sync
-
-.collect_srcrevs: &collect_srcrevs
-  # write all package AUTOREVS to file
-  - |-
-    SRCREVS_FILE="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/BUILD_SRCREVS.log"
-    if [ -d "$( dirname "${SRCREVS_FILE}" )" ];then
-        buildhistory-collect-srcrevs > ${SRCREVS_FILE}
-        echo "${LOGPREFIX} buildhistory-collect-srcrevs:"
-        cat ${SRCREVS_FILE}
-    fi
-
-.dump_install_command: &dump_install_command
-  # print install instructions
-  - |-
-    for i in ${INSTALLSCRIPTS};do
-    SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${i}"
-    if [[ -f "${SCRIPT}" ]]; then
-      cat <<-EOF
-    ==============================
-        Install the image:
-
-    export GITLAB_TOKEN=<your_access_token>
-    FNG="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/${BUILDPATH}/${IMAGEPATH}/${i}"
-    curl --location --header "PRIVATE-TOKEN: \$GITLAB_TOKEN" "\$FNG" \
-                              | sh -s -- --url="\$(dirname "\$FNG")"
-    ==============================
-    EOF
-    fi
-    done
-
-.build_script: &build_script
-  # setup build environment
-  - echo "${LOGPREFIX} Build configuration MACHINE=${CI_PARAM_MACHINE}
-      DISTRO=${CI_PARAM_DISTRO} IMAGE=${CI_PARAM_IMAGE}"
-  - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env
-  - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env
-  - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env
-  - echo "${LOGPREFIX} Using build dir ${BUILDPATH}"
-  - export MACHINE="${CI_PARAM_MACHINE}"
-  - export DISTRO="${CI_PARAM_DISTRO}"
-  - export EULA="1"
-  - source ./"${SETUPSCRIPT}" "${BUILDPATH}"
-  # start build
-  - echo -e "section_start:`date +%s`:bitbake_run\r\e[0KBitbake Log"
-  - echo "${LOGPREFIX} bitbake ${CI_PARAM_IMAGE} -c ${BITBAKE_TASK}"
-  - bitbake "${CI_PARAM_IMAGE}" -c "${BITBAKE_TASK}"
-  - echo -e "section_end:`date +%s`:bitbake_run\r\e[0K"
-
-.build:
-  variables:
-    GIT_STRATEGY: none
-    SETUPSCRIPT: "setup-environment"
-    INSTALLSCRIPTS: "fng-install.sh fngsystem-self-update.sh"
-    BITBAKE_TASK: "build"
-    LOGPREFIX: "CI:build:"
-  before_script:
-    - *docker_check
-    - *setup_ssh
-    - *repo_checkout
-  script:
-    - *build_script
-    - *collect_srcrevs
-    - *dump_install_command
-  artifacts:
-    paths:
-      - "${BUILDPATH}/${IMAGEPATH}/*"
-      - "${BUILDPATH}/${LICENSESPATH}/**/license.manifest"
-    reports:
-      dotenv: build.env
-
-.prepare_test:
-  variables:
-    LOGPREFIX: "CI:test:"
-  before_script:
-    - *setup_ssh
-    - |-
-      # Copy artifacts to local server for automated tests
-      echo "${LOGPREFIX} Copy files for automated tests using package_release"
-      .gitlab-ci/scripts/package_release.py \
-        --images-dir="${BUILDPATH}/${IMAGEPATH}" \
-        --outputdir-local="${DEPLOYPATH_TEST}"
-
-# --------------------------------------------------------------------------------------
-# Stage: build
-# --------------------------------------------------------------------------------------
-.buildbase:
-  tags:
-    - builds
-  timeout: 8h
-  interruptible: true
-  image:
-    name: "${CI_IMAGE_YOCTO}"
-    # Override entrypoint so we can pass --id to set the UID and GID for the
-    # user that is created in the container. This is a feature of the
-    # crops/poky images. See poky-entry.py for details.
-    entrypoint:
-      - "/usr/bin/distro-entry.sh"
-      - "/usr/bin/dumb-init"
-      - "--"
-      - "/usr/bin/poky-entry.py"
-      - "--id=118:998"
-  artifacts:
-    expire_in: 4 weeks
-
-.buildimage:
-  extends:
-    - .buildbase
-    - .build
-  needs: []
-
-.buildfng:
-  extends:
-    - .buildimage
-  variables:
-    CI_PARAM_IMAGE: ${CI_PARAM_IMAGE_FNG}
-    CI_PARAM_DISTRO: ${CI_PARAM_DISTRO_FNG}
-
-.buildsdk:
-  extends:
-    - .buildimage
-    - .package
-  stage: build
-  rules:
-    - when: manual
-      allow_failure: true
-  variables:
-    BITBAKE_TASK: "populate_sdk"
-  artifacts:
-    paths:
-      - "${BUILDPATH}/${SDKPATH}/*.manifest"
-      - "${BUILDPATH}/${SDKPATH}/*.json"
-    reports:
-      dotenv: package.env
-
-# ---------------------------------------------------------------------------------------
-# Stage: test
-# ---------------------------------------------------------------------------------------
-.test:
-  extends:
-    - .infrastructure
-    - .prepare_test
-  timeout: 1h
-  rules:
-    - when: manual
-      allow_failure: true
-  variables:
-    # Include git submodules
-    GIT_SUBMODULE_STRATEGY: recursive
-    CI_PARAM_TEST_SUITE: '{platform}.jinja2'
-    CI_PARAM_EXTRA: --nop
-  artifacts:
-    when: always
-    paths:
-      - "results/**"
-    reports:
-      junit: results/results-*.xml
-  after_script:
-    - rm -r "${DEPLOYPATH_TEST}"
-
-  script:
-    - |-
-      # Submit tests to lava server
-      RELEASE=$(ls ${DEPLOYPATH_TEST}/)
-      INSTALLSCRIPT_ABS="$DEPLOYPATH_TEST/$RELEASE/$CI_PARAM_MACHINE/fng-install.sh"
-      FNG_INSTALL_URL="${ARTIFACTS_HOST_URL}/${INSTALLSCRIPT_ABS#/*/}"
-      .gitlab-ci/scripts/submit_test.py \
-          --fng-install "${FNG_INSTALL_URL}" \
-          --name \
-          "Gitlab build test ${CI_PARAM_MACHINE} ${RELEASE} ${CI_PIPELINE_ID}" \
-          --results-path "results" \
-          --test-repo ${TESTS_GIT_URL} \
-          --test-repo-branch ${MASTER_BRANCH_MANIFEST} \
-          --test-plan ${CI_PARAM_TEST_SUITE} \
-          ${CI_PARAM_EXTRA} \
-          ${CI_PARAM_PLATFORMS}
-
-# --------------------------------------------------------------------------------------
-# Stage: deploy
-# --------------------------------------------------------------------------------------
-.deployimage:
-  extends:
-    - .infrastructure
-    - .package
-  rules:
-    - when: manual
-      allow_failure: true
-  script:
-    # Workaround: We need a command in the script section to be able to run the
-    # after_script section of the package step.
-    - echo
-  artifacts:
-    paths:
-      - release/**/**/*
-    reports:
-      dotenv: package.env
-
-# --------------------------------------------------------------------------------------
-# Stage: uploadftp
-# --------------------------------------------------------------------------------------
-.uploadftp:
-  variables:
-    CI_PARAM_PACKAGE_FTP: "true"
-  extends:
-    - .infrastructure
-    - .package
-  rules:
-    - if: $CI_COMMIT_TAG
-      when: manual
-      allow_failure: true
-  script:
-    # Workaround: We need a command in the script section to be able to run the
-    # after_script section of the package step.
-    - echo
-  timeout: 30m
-
-.uploadsdkftp:
-  variables:
-    ARTIFACTS_SDK_PATH: "$LOCALDIR/$BUILD_MACHINE/sdk"
diff --git a/manifest-integration-ci-test.yml b/manifest-integration-ci-test.yml
deleted file mode 100644
index 63e0764..0000000
--- a/manifest-integration-ci-test.yml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-include:
-  - local: manifest-integration.yml
-
-variables:
-  # FIXME: This variable is used in the manifest-integration's build stage for the
-  # trigger project. Due to a missing feature in GitLab, we cannot use any variables
-  # here and have to hard-code 'seco-ne' instead of using CI_PROJECT_ROOT_NAMESPACE.
-  # (https://gitlab.com/gitlab-org/gitlab/-/issues/249583)
-  MANIFEST_PROJECT: seco-ne/yocto/infrastructure/ci-test/minimal-manifest
-
-  # The master branch is hardcoded here, because it cannot be determined automatically.
-  # Has to be modified for new branches, e.g. new Yocto versions or fix releases.
-  MASTER_BRANCH_MANIFEST: master
-  MASTER_BRANCH_PROJECT: master
diff --git a/manifest-integration-jobs.yml.jinja2 b/manifest-integration-jobs.yml.jinja2
new file mode 100644
index 0000000..1553845
--- /dev/null
+++ b/manifest-integration-jobs.yml.jinja2
@@ -0,0 +1,88 @@
+---
+# --------------------------------------------------------------------------------------
+# Global
+# --------------------------------------------------------------------------------------
+include:
+  # FIXME: see FIXME comments in manifest-integration-pipelines.yml.jinja2
+  # - project: '${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/gitlab-ci'
+  #   ref: ${GITLAB_CI_REVISION}
+  - project: {{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci
+    ref: {{ GITLAB_CI_REVISION }}
+    file: common.yml
+
+workflow:
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
+
+stages:
+  - manifest-integration-jobs
+
+# --------------------------------------------------------------------------------------
+# Merge request pipeline
+# --------------------------------------------------------------------------------------
+integrate:
+  extends: .infrastructure
+  stage: manifest-integration-jobs
+  rules:
+    - if: $CI_MERGE_REQUEST_IID
+  cache:
+    policy: push
+  script:
+    - cd ${CI_PROJECT_DIR}
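+    # In merge request pipelines CI_MERGE_REQUEST_IID is set directly; in branch
+    # pipelines fall back to the first entry of the comma-separated
+    # CI_OPEN_MERGE_REQUESTS list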
+    - if [ -n "${CI_MERGE_REQUEST_IID}" ];then
+        MERGE_REQUEST="${CI_MERGE_REQUEST_IID}";
+      else
+        MERGE_REQUEST="${CI_OPEN_MERGE_REQUESTS%%,*}";
+      fi
+    - .gitlab-ci/scripts/integrate_into_manifest.py
+        --gitlab-url=${CI_SERVER_URL}
+        --token=${GITBOT_TOKEN}
+        --manifest-project=${TARGET_PROJECT}
+        --manifest-file=${MANIFEST_FILE}
+        --manifest-branch=${TARGET_BRANCH}
+        --project=${CI_PROJECT_PATH}
+        --merge-request=${MERGE_REQUEST}
+        --save-revision-to=manifest_revision
+        --recipe-name=${BB_RECIPE_NAME}
+  artifacts:
+    paths:
+      - manifest_revision
+
+build:
+  stage: manifest-integration-jobs
+  needs: ["integrate"]
+  rules:
+    # Do not run build if the "skip build" label is set on the merge request
+    - if: $CI_MERGE_REQUEST_LABELS =~ /skip build/
+      when: never
+    - if: $CI_MERGE_REQUEST_IID
+  trigger:
+    # FIXME: see FIXME comments in manifest-integration-pipelines.yml.jinja2
+    # project: $TARGET_PROJECT
+    project: {{ TARGET_PROJECT }}
+    branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}/into/${TARGET_BRANCH}"
+    strategy: depend
+
+# --------------------------------------------------------------------------------------
+# Master pipeline
+# --------------------------------------------------------------------------------------
+merge:
+  extends: .infrastructure
+  stage: manifest-integration-jobs
+  rules:
+    - if: $CI_COMMIT_BRANCH == $SOURCE_BRANCH
+  script:
+    - cd ${CI_PROJECT_DIR}
+    - .gitlab-ci/scripts/merge_into_manifest.py
+        --gitlab-url=${CI_SERVER_URL}
+        --token=${GITBOT_TOKEN}
+        --manifest-project=${TARGET_PROJECT}
+        --manifest-branch=${TARGET_BRANCH}
+        --project=${CI_PROJECT_PATH}
+        --project-branch=${SOURCE_BRANCH}
+        --commit=${CI_COMMIT_SHA}
+        --save-revision-to=manifest_revision
+        --recipe-name=${BB_RECIPE_NAME}
+  artifacts:
+    paths:
+      - manifest_revision
diff --git a/manifest-integration-pipelines.yml.jinja2 b/manifest-integration-pipelines.yml.jinja2
new file mode 100644
index 0000000..6eb9ee6
--- /dev/null
+++ b/manifest-integration-pipelines.yml.jinja2
@@ -0,0 +1,84 @@
+---
+# --------------------------------------------------------------------------------------
+# Global
+# --------------------------------------------------------------------------------------
+include:
+  - project: {{ CI_PROJECT_ROOT_NAMESPACE }}/yocto/infrastructure/gitlab-ci
+    ref: {{ GITLAB_CI_REVISION }}
+    file: common.yml
+
+workflow:
+  rules:
+    - if: $CI_PIPELINE_SOURCE == "parent_pipeline"
+
+stages:
+  - manifest-integration-pipelines
+
+# --------------------------------------------------------------------------------------
+# Generate job
+# --------------------------------------------------------------------------------------
+# Use one single job to generate multiple yaml files for the downstream pipelines.
+# FIXME: This is only necessary due to a GitLab limitation:
+# https://gitlab.com/gitlab-org/gitlab/-/issues/347469
+# We work around this by generating manifest-integration-jobs.yml from a Jinja2 template
+# and inserting the trigger project via a Jinja2 variable.
+# The issue is already fixed and will be released in GitLab 15.3:
+# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/92346
+# As soon as we update to this version, we can get rid of the generate job and convert
+# the Jinja2 template to a simple YAML file.
+generate:
+  extends:
+    - .infrastructure
+  stage: manifest-integration-pipelines
+  script:
+    # The job generation script implicitly passes the OS environment to the template, so
+    # that the template has access to all GitLab CI variables. Hence there is no need
+    # to explicitly pass any of them as command line arguments.
+{% for integration in INTEGRATION.split('\n') %}
+{% set SOURCE_BRANCH, TARGET_PROJECT, TARGET_BRANCH = integration.split(':') %}
+{% if (CI_MERGE_REQUEST_TARGET_BRANCH_NAME is defined
+      and SOURCE_BRANCH == CI_MERGE_REQUEST_TARGET_BRANCH_NAME)
+      or SOURCE_BRANCH == CI_COMMIT_REF_NAME %}
+    - TARGET_PROJECT={{ TARGET_PROJECT }}
+        .gitlab-ci/scripts/generate_job_from_template.py
+        --template=.gitlab-ci/manifest-integration-jobs.yml.jinja2
+        > manifest-integration-jobs-{{ loop.index }}.yml
+{% endif %}
+{% endfor %}
+  artifacts:
+    expire_in: 4 weeks
+    paths:
+      - manifest-integration-jobs-*.yml
+
+# --------------------------------------------------------------------------------------
+# Trigger jobs
+# --------------------------------------------------------------------------------------
+{% for integration in INTEGRATION.split('\n') %}
+{% set SOURCE_BRANCH, TARGET_PROJECT, TARGET_BRANCH = integration.split(':') %}
+{% if (CI_MERGE_REQUEST_TARGET_BRANCH_NAME is defined
+      and SOURCE_BRANCH == CI_MERGE_REQUEST_TARGET_BRANCH_NAME)
+      or SOURCE_BRANCH == CI_COMMIT_REF_NAME %}
+
+{{ TARGET_PROJECT }}:{{ TARGET_BRANCH }}:
+  stage: manifest-integration-pipelines
+  needs:
+    - generate
+  variables:
+    SOURCE_BRANCH: {{ SOURCE_BRANCH }}
+    TARGET_PROJECT: {{ TARGET_PROJECT }}
+    TARGET_BRANCH: {{ TARGET_BRANCH }}
+    GITLAB_CI_REVISION: {{ GITLAB_CI_REVISION }}
+    MANIFEST_FILE: {{ MANIFEST_FILE }}
+    BB_RECIPE_NAME: {{ BB_RECIPE_NAME }}
+  trigger:
+    include:
+      # FIXME: Use these settings after switching from jinja2 to yaml (see above)
+      # project: '${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/gitlab-ci'
+      # ref: ${GITLAB_CI_REVISION}
+      # file: manifest-integration-jobs.yml
+      artifact: manifest-integration-jobs-{{ loop.index }}.yml
+      job: generate
+    strategy: depend
+
+{% endif %}
+{% endfor %}
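
The 'generate' job relies on the template having access to the full OS
environment. A minimal sketch of what a generator script such as
generate_job_from_template.py presumably does (the actual implementation is not
part of this patch; only the mechanism described in the comments is shown):

    import os
    import sys

    from jinja2 import Template

    def render(template_path: str) -> str:
        # Pass the whole environment, and thus all GitLab CI variables,
        # as template context.
        with open(template_path, encoding="utf8") as fp:
            return Template(fp.read()).render(**os.environ)

    if __name__ == "__main__":
        sys.stdout.write(render(sys.argv[1]))
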
diff --git a/manifest-integration-yocto.yml b/manifest-integration-yocto.yml
deleted file mode 100644
index e094eb7..0000000
--- a/manifest-integration-yocto.yml
+++ /dev/null
@@ -1,15 +0,0 @@
----
-include:
-  - local: manifest-integration.yml
-
-variables:
-  # FIXME: This variable is used in the manifest-integration's build stage for the
-  # trigger project. Due to a missing feature in GitLab, we cannot use any variables
-  # here and have to hard-code 'seco-ne' instead of using CI_PROJECT_ROOT_NAMESPACE.
-  # (https://gitlab.com/gitlab-org/gitlab/-/issues/249583)
-  MANIFEST_PROJECT: seco-ne/yocto/manifest
-
-  # The master branch is hardcoded here, because it cannot be determined automatically.
-  # Has to be modified for new branches, e.g. new Yocto versions or fix releases.
-  MASTER_BRANCH_MANIFEST: dunfell
-  MASTER_BRANCH_PROJECT: dunfell
diff --git a/manifest-integration.yml b/manifest-integration.yml
index c834579..53691af 100644
--- a/manifest-integration.yml
+++ b/manifest-integration.yml
@@ -6,11 +6,7 @@ include:
   - local: common.yml
 
 stages:
-  - infrastructure
-  - integrate
-  - merge
-  - build
-  - check
+  - manifest-integration
 
 variables:
   MANIFEST_FILE: "default.xml"
@@ -37,140 +33,88 @@ workflow:
     #    the project in the seco-ne namespace (customer sending
     #    change to us). Here the IDs used below differ.
     #
-    - if: $CI_PROJECT_ROOT_NAMESPACE == "edgehog"
-        && $CI_MERGE_REQUEST_SOURCE_PROJECT_ID == $CI_MERGE_REQUEST_PROJECT_ID
+    - if: $CI_PROJECT_ROOT_NAMESPACE != "seco-ne"
+      when: never
+    - if: $CI_MERGE_REQUEST_SOURCE_PROJECT_ID != $CI_MERGE_REQUEST_PROJECT_ID
+      when: never
+    # FIXME: Unfortunately we cannot use variables in regular expressions due to a
+    # GitLab limitation: https://gitlab.com/gitlab-org/gitlab/-/issues/209904
+    # As soon as this gets fixed, use the regex-based rules below instead of checking
+    # against the MASTER_BRANCH variable.
+    # Run pipeline if target branch of the merge request has an integration target, i.e.
+    # INTEGRATION contains a line beginning with the target branch followed by a colon.
+    # This also implies that the pipeline runs in merge request context only.
+    # - if: $INTEGRATION =~ /^$CI_MERGE_REQUEST_TARGET_BRANCH_NAME:/m
+    - if: $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == $MASTER_BRANCH
+    # Run pipeline on target branch after merging a merge request.
+    # - if: $INTEGRATION =~ /^$CI_COMMIT_BRANCH:/m
+    - if: $CI_COMMIT_BRANCH == $MASTER_BRANCH
+
+.skip-for-gitlab-ci-integrations:
+  rules:
+    - if: $CI_COMMIT_REF_NAME !~ /^integrate\/gitlab-ci\/.*/
 
 # --------------------------------------------------------------------------------------
-# Stage: infrastructure
+# Manifest integration jobs
 # --------------------------------------------------------------------------------------
-integrate:
-  extends: .infrastructure
-  rules:
-    # Do not integration pipeline for merge requests for integrate/gitlab-ci/ branches
-    # The integration is done from the pipeline in gitlab-ci already
-    - if: $CI_COMMIT_REF_NAME =~ /^integrate\/gitlab-ci\/.*/
-      when: never
-    # We have to make sure that the pipeline runs for the current manifest
-    # master at the time a merge request is created. Otherwise we cannot
-    # guarantee a green master after merging.
-    - if: $CI_MERGE_REQUEST_IID
-    # Explicitly allow externally triggered pipelines in every case
-    - if: $CI_PIPELINE_SOURCE == "pipeline" || $CI_PIPELINE_SOURCE == "api"
-  cache:
-    policy: push
+generate-pipelines:
+  extends:
+    - .infrastructure
+    - .skip-for-gitlab-ci-integrations
+  stage: manifest-integration
   script:
-    - echo "CI_PROJECT_DIR is defined as ${CI_PROJECT_DIR}"
-    - echo "CI_PROJECT_PATH is defined as ${CI_PROJECT_PATH}"
-    - echo "CI_SERVER_URL is defined as ${CI_SERVER_URL}"
-    - echo "CI_MERGE_REQUEST_IID is defined as ${CI_MERGE_REQUEST_IID}"
-    - cd ${CI_PROJECT_DIR}
-    - if [ -n "${CI_MERGE_REQUEST_IID}" ];then
-        MERGE_REQUEST="${CI_MERGE_REQUEST_IID}";
-      else
-        MERGE_REQUEST="${CI_OPEN_MERGE_REQUESTS%%,*}";
-      fi
-    - .gitlab-ci/scripts/integrate_into_manifest.py
-        --gitlab-url=${CI_SERVER_URL}
-        --token=${GITBOT_TOKEN}
-        --manifest-project=${MANIFEST_PROJECT}
-        --manifest-file=${MANIFEST_FILE}
-        --integration-base=${MASTER_BRANCH_MANIFEST}
-        --project=${CI_PROJECT_PATH}
-        --merge-request=${MERGE_REQUEST}
-        --save-revision-to=manifest_revision
-        --recipe-name=${BB_RECIPE_NAME}
+    # The job generation script implicitly passes the OS environment to the template, so
+    # that the template has access to all GitLab CI variables. Hence there is no need
+    # to explicitly pass any of them as command line arguments.
+    - .gitlab-ci/scripts/generate_job_from_template.py
+        --template=.gitlab-ci/manifest-integration-pipelines.yml.jinja2
+        > manifest-integration-pipelines.yml
   artifacts:
+    expire_in: 4 weeks
     paths:
-      - manifest_revision
+      - manifest-integration-pipelines.yml
+
+trigger-pipelines:
+  extends:
+    - .skip-for-gitlab-ci-integrations
+  stage: manifest-integration
+  needs:
+    - generate-pipelines
+  trigger:
+    include:
+      - artifact: manifest-integration-pipelines.yml
+        job: generate-pipelines
+    strategy: depend
+
+check:
+  extends:
+    - .infrastructure
+    - .skip-for-gitlab-ci-integrations
+  stage: manifest-integration
+  rules:
+    - if: $CI_MERGE_REQUEST_IID && $CI_COMMIT_REF_NAME !~ /^integrate\/gitlab-ci\/.*/
+  needs:
+    - trigger-pipelines
+  allow_failure: true
+  script:
+    - cd ${CI_PROJECT_DIR}
+    # Loop over all integrations and check each integration branch
+    - while read -r integration; do
+        SOURCE_BRANCH=$(echo $integration | cut -d':' -f1);
+        TARGET_PROJECT=$(echo $integration | cut -d':' -f2);
+        TARGET_BRANCH=$(echo $integration | cut -d':' -f3);
+        if [[ "$SOURCE_BRANCH" == "$CI_MERGE_REQUEST_TARGET_BRANCH_NAME" ]]; then
+          .gitlab-ci/scripts/check_if_integration_branch_is_up_to_date.py
+            --gitlab-url=${CI_SERVER_URL}
+            --token=${GITBOT_TOKEN}
+            --target-project=${TARGET_PROJECT}
+            --target-branch=${TARGET_BRANCH}
+            --source-project=${CI_PROJECT_PATH}
+            --merge-request=${CI_MERGE_REQUEST_IID}
+            ;
+        fi;
+      done <<< "$INTEGRATION"
 
-# yamllint:
-#   extends: .yamllint
-# 
-# # --------------------------------------------------------------------------------------
-# # Stage: merge
-# # --------------------------------------------------------------------------------------
-# merge:
-#   extends: .infrastructure
-#   stage: merge
-#   rules:
-#     - if: $CI_COMMIT_BRANCH == $MASTER_BRANCH_PROJECT
-#   script:
-#     - cd ${CI_PROJECT_DIR}
-#     - .gitlab-ci/scripts/merge_into_manifest.py
-#         --gitlab-url=${CI_SERVER_URL}
-#         --token=${GITBOT_TOKEN}
-#         --manifest-project=${MANIFEST_PROJECT}
-#         --master-branch=${MASTER_BRANCH_MANIFEST}
-#         --project=${CI_PROJECT_PATH}
-#         --master-branch-project=${MASTER_BRANCH_PROJECT}
-#         --commit=${CI_COMMIT_SHA}
-#         --save-revision-to=manifest_revision
-#         --recipe-name=${BB_RECIPE_NAME}
-#   artifacts:
-#     paths:
-#       - manifest_revision
-# 
-# # --------------------------------------------------------------------------------------
-# # Stage: build
-# # --------------------------------------------------------------------------------------
-# build:
-#   stage: build
-#   rules:
-#     # Do not run build if the "skip build" label is set on the merge request
-#     - if: $CI_MERGE_REQUEST_LABELS =~ /skip build/
-#       when: never
-#     # execute this in MR only and not for integrate/gitlab-ci/ integrations
-#     # branches. These are build after the integration has been done in all
-#     # projects
-#     - if: $CI_MERGE_REQUEST_IID && $CI_COMMIT_REF_NAME !~ /^integrate\/gitlab-ci\/.*/
-#   trigger:
-#     project: !reference [variables, MANIFEST_PROJECT]
-#     branch: "integrate/${CI_PROJECT_NAME}/${CI_COMMIT_REF_NAME}"
-#     strategy: depend
-# 
-# # --------------------------------------------------------------------------------------
-# # Stage: check
-# # --------------------------------------------------------------------------------------
-# check:
-#   extends: .infrastructure
-#   stage: check
-#   rules:
-#     # Do not run check if the "skip build" label is set on the merge request
-#     - if: $CI_MERGE_REQUEST_LABELS =~ /skip build/
-#       when: never
-#     # Do not integration pipeline for merge requests for integrate/gitlab-ci/ branches
-#     # The integration is done from the pipeline in gitlab-ci already
-#     - if: $CI_COMMIT_REF_NAME =~ /^integrate\/gitlab-ci\/.*/
-#       when: never
-#     - if: $CI_MERGE_REQUEST_IID
-#     # Explicitly allow externally triggered pipelines in every case
-#     - if: $CI_PIPELINE_SOURCE == "pipeline" || $CI_PIPELINE_SOURCE == "api"
-#   needs: ["integrate"]
-#   allow_failure: true
-#   script:
-#     - cd ${CI_PROJECT_DIR}
-#     # When running in a trigger pipeline the CII_MERGE_REQUEST_IID is not set
-#     # but CI_OPEN_MERGE_REQUESTS. We use  the first of this comma separated list
-#     # in this case
-#     - if [ -n "${CI_MERGE_REQUEST_IID}" ];then
-#         MERGE_REQUEST="${CI_MERGE_REQUEST_IID}";
-#       else
-#         MERGE_REQUEST="${CI_OPEN_MERGE_REQUESTS%%,*}";
-#       fi
-#     # The 'parent_merge_request' is passed from the trigger
-#     # in case this check job is part of a gitlab-ci integration
-#     # pipeline. It is only used to display the correct MR to run again
-#     # in a failed check
-#     - if [ -n "${parent_merge_request}" ];then
-#         PARENT_MR="--parent-merge-request=${parent_merge_request}";
-#       fi
-#     - .gitlab-ci/scripts/check_if_integration_branch_is_up_to_date.py
-#         --gitlab-url=${CI_SERVER_URL}
-#         --token=${GITBOT_TOKEN}
-#         --manifest-project=${MANIFEST_PROJECT}
-#         --integration-base=${MASTER_BRANCH_MANIFEST}
-#         --project=${CI_PROJECT_PATH}
-#         --merge-request=${MERGE_REQUEST}
-#         --verbose
-#         ${PARENT_MR}
-# 
\ No newline at end of file
+yamllint:
+  extends: .yamllint
+  stage: manifest-integration
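
Both the generate job and the check job parse the INTEGRATION variable, which
holds one 'source-branch:target-project:target-branch' triple per line. A small
illustration in Python with hypothetical values (the project path follows the
old hard-coded manifest default; the branch names are made up):

    INTEGRATION = (
        "dunfell:seco-ne/yocto/manifest:dunfell\n"
        "hardknott:seco-ne/yocto/manifest:hardknott"
    )

    for line in INTEGRATION.splitlines():
        # Project paths contain '/' but no ':', so a plain split is safe.
        source_branch, target_project, target_branch = line.split(":")
        print(f"{source_branch} -> {target_project} @ {target_branch}")
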
diff --git a/manifest-package.yml b/manifest-package.yml
index 3ac3edb..0267c30 100644
--- a/manifest-package.yml
+++ b/manifest-package.yml
@@ -3,57 +3,50 @@
   variables:
     CI_PARAM_PACKAGE_FTP: "false"
     LOGPREFIX: "CI:package:"
-  after_script:
+  script:
     - |-
       # Package release files
       # TODO: At the moment this script is used for
       # - image deployment,
-      # - sdk build,
+      # - sdk deployment,
       # - image and sdk ftp upload
       # - and artifact deployment for automated tests.
       # Some use cases require individual processing, which leads to
       # high script complexity. That should be improved.
 
       echo "${LOGPREFIX} Assemble build variables"
-      # We need to source the build.env file which is generated by the
-      # previous build step. This is a workaround to get the environment
-      # variables in the after_script step.
-      [ -e build.env ] && source build.env
-      BUILDPATH="build-${BUILD_DISTRO}-${BUILD_MACHINE}"
-      [ -z "${ARTIFACTS_IMAGE_PATH}" ] && \
-        ARTIFACTS_IMAGE_PATH="${BUILDPATH}/${IMAGEBASEPATH}/${BUILD_MACHINE}"
-      [ -z "${ARTIFACTS_LICENSES_PATH}" ] && \
-        ARTIFACTS_LICENSES_PATH="${BUILDPATH}/${LICENSESPATH}"
-      [ -z "${ARTIFACTS_SDK_PATH}" ] && \
-        ARTIFACTS_SDK_PATH="${BUILDPATH}/${SDKPATH}"
+      # Variables set by build.env from the build artifacts
+      ARTIFACTS_IMAGE_PATH="$BUILD_PATH_IMAGE"
+      ARTIFACTS_LICENSES_PATH="$BUILD_PATH_LICENSE"
+      ARTIFACTS_SDK_PATH="$BUILD_PATH_SDK"
 
       if ${CI_PARAM_PACKAGE_FTP}; then
           UPLOAD_PARAM="" # don't store as gitlab artifact
           OUTDIR_BASE="/artifacts-ftp"
       else
-          UPLOAD_PARAM="--outputdir-upload=release"
+          UPLOAD_PARAM="--output-dir=release"
           OUTDIR_BASE="/artifacts"
       fi
 
       # If we are on the master branch and a tag is set
       # we treat it as a release
-      if [ -n "${CI_COMMIT_TAG}" ];then
+      if [ -n "$CI_COMMIT_TAG" ];then
         outdir="${OUTDIR_BASE}-yocto/Releases"
-        [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \
+        [ "$BUILD_DISTRO" = "seconorth-fngsystem" ] && \
           outdir="${OUTDIR_BASE}-fngsystem"
       else
         outdir="${OUTDIR_BASE}-yocto/Interne_Releases"
-        [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \
+        [ "$BUILD_DISTRO" = "seconorth-fngsystem" ] && \
           outdir="${OUTDIR_BASE}-fngsystem/CI_Builds"
       fi
 
       # Print vars for debugging purposes
-      echo "${LOGPREFIX} BUILDPATH=${BUILDPATH}"
-      echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=${ARTIFACTS_IMAGE_PATH}"
-      echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=${ARTIFACTS_LICENSES_PATH}"
-      echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=${ARTIFACTS_SDK_PATH}"
-      echo "${LOGPREFIX} UPLOAD_PARAM=${UPLOAD_PARAM}"
-      echo "${LOGPREFIX} outdir=${outdir}"
+      echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=$ARTIFACTS_IMAGE_PATH"
+      echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=$ARTIFACTS_LICENSES_PATH"
+      echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=$ARTIFACTS_SDK_PATH"
+      echo "${LOGPREFIX} UPLOAD_PARAM=$UPLOAD_PARAM"
+      echo "${LOGPREFIX} RELEASE_SUFFIX=$RELEASE_SUFFIX"
+      echo "${LOGPREFIX} outdir=$outdir"
 
       # Check if the package_release script is available
       script=".gitlab-ci/scripts/package_release.py"
@@ -72,7 +65,8 @@
               --images-dir="${ARTIFACTS_IMAGE_PATH}" \
               --licenses-dir="${ARTIFACTS_LICENSES_PATH}" \
               --doc-dir=. \
-              --outputdir-local=${outdir} \
+              --output-dir=${outdir} \
+              --release-suffix="${RELEASE_SUFFIX}" \
               $UPLOAD_PARAM
       else
           echo "${LOGPREFIX} No image found"
@@ -84,7 +78,8 @@
           echo "${LOGPREFIX} SDK dir found, execute ${script}"
           ${script} \
               --sdk-dir=${ARTIFACTS_SDK_PATH} \
-              --outputdir-local="${outdir}"
+              --output-dir="${outdir}" \
+              --release-suffix="${RELEASE_SUFFIX}"
       else
           echo "${LOGPREFIX} No SDK found"
       fi
@@ -107,7 +102,8 @@
           ${apscript} \
                 --images-dir="${ARTIFACTS_IMAGE_PATH}" \
                 --outputdir-local=${outdir} \
-                --outputdir-upload=release
+                --outputdir-upload=release \
+                --release-suffix="${RELEASE_SUFFIX}"
       else
           echo "${LOGPREFIX} No AlphaPlan FWR articles need to be generated"
       fi
diff --git a/manifest-pipeline-ci-test.yml b/manifest-pipeline-ci-test.yml
index cae23bc..08cbad8 100644
--- a/manifest-pipeline-ci-test.yml
+++ b/manifest-pipeline-ci-test.yml
@@ -6,19 +6,21 @@ variables:
   # The id of the gitlab project used in the rules section to not run pipelines in
   # forked projects. Using variable here, to allow override in other projects including
   # this file.
-  MANIFEST_PROJECT_ID: 1725
+  MANIFEST_PROJECT_ID: 1742
 
   # In the manifest, the remotes are specified by an identifier. This is used to find
   # out included projects for the retrigger job. In custom manifests, the remote may be
   # named differently, so we need a variable that may be overridden.
-  CI_PARAM_SECO_REMOTE: seco-ne
+  CI_PARAM_SECO_REMOTE: ci-test
+
+  # GitLab group to search for projects to retrigger
+  RETRIGGER_GROUP: ${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/ci-test
 
   BUILD_TIMEOUT: 2m
 
   # This is the jinja2 template file used to generate the build jobs
-  BUILD_JOBS_TEMPLATE: build-jobs-ci-test.jinja2
+  BUILD_JOBS_TEMPLATE: build-jobs-ci-test.yml.jinja2
 
   # The master branch is hardcoded here, because it cannot be determined automatically.
   # Has to be modified for new branches, e.g. new Yocto versions or fix releases.
-  MASTER_BRANCH_MANIFEST: master
-  MASTER_BRANCH_PROJECT: master
+  MASTER_BRANCH: master
diff --git a/manifest-pipeline-yocto.yml b/manifest-pipeline-yocto.yml
index 30976f6..631ebd9 100644
--- a/manifest-pipeline-yocto.yml
+++ b/manifest-pipeline-yocto.yml
@@ -13,21 +13,26 @@ variables:
   # named differently, so we need a variable that may be overridden.
   CI_PARAM_SECO_REMOTE: seco-ne
 
+  # GitLab group to search for projects to retrigger
+  RETRIGGER_GROUP: ${CI_PROJECT_ROOT_NAMESPACE}
+
   BUILD_TIMEOUT: 1h
 
   # This is the jinja2 template file used to generate the build jobs
-  BUILD_JOBS_TEMPLATE: build-jobs-yocto.jinja2
+  BUILD_JOBS_TEMPLATE: build-jobs-yocto.yml.jinja2
 
-  # Default image and distro
-  CI_PARAM_IMAGE: guf-image
-  CI_PARAM_DISTRO: guf-wayland
+generate-build-jobs:
+  variables:
+    # Default image and distro
+    CI_PARAM_IMAGE: guf-image
+    CI_PARAM_DISTRO: guf-wayland
 
-  # Flash-N-Go image and distro
-  # In the past, the buildfng job overwrote the image and distro itself. Due to the
-  # transition to the new seconorth names, image and distro for the buildfng must be
-  # settable from outside of the job.
-  CI_PARAM_IMAGE_FNG: fngsystem-image
-  CI_PARAM_DISTRO_FNG: guf-fngsystem
+    # Flash-N-Go image and distro
+    # In the past, the buildfng job overwrote the image and distro itself. Due to the
+    # transition to the new seconorth names, image and distro for the buildfng must be
+    # settable from outside of the job.
+    CI_PARAM_IMAGE_FNG: fngsystem-image
+    CI_PARAM_DISTRO_FNG: guf-fngsystem
 
-  # List of machines to build images for
-  CI_PARAM_MACHINES: imx6guf imx6ullguf imx8mguf imx8mpguf
+    # List of machines to build images for
+    CI_PARAM_MACHINES: imx6guf imx6ullguf imx8mguf imx8mpguf
diff --git a/manifest-pipeline.yml b/manifest-pipeline.yml
index 94c281a..80a3605 100644
--- a/manifest-pipeline.yml
+++ b/manifest-pipeline.yml
@@ -6,14 +6,16 @@ include:
   - local: common.yml
 
 stages:
+  - manifest-pipeline
   - retrigger
-  - infrastructure
   - build
 
 workflow:
   rules:
     # Explicitly allow externally triggered pipelines in every case
-    - if: $CI_PIPELINE_SOURCE == "pipeline" || $CI_PIPELINE_SOURCE == "api"
+    - if: $CI_PIPELINE_SOURCE == "api"
+    - if: $CI_PIPELINE_SOURCE == "pipeline"
+    - if: $CI_PIPELINE_SOURCE == "web"
     # Do not run pipelines for merge requests
     - if: $CI_MERGE_REQUEST_IID
       when: never
@@ -27,44 +29,36 @@ workflow:
     # In all other cases, run the pipeline automatically
     - when: always
 
-# --------------------------------------------------------------------------------------
-# Stage: retrigger
-# --------------------------------------------------------------------------------------
-retrigger:
-  extends: .infrastructure
-  stage: retrigger
+.full_build_pipeline:
   rules:
-    - if: $CI_COMMIT_REF_NAME == $MASTER_BRANCH_MANIFEST && $CI_PIPELINE_SOURCE != "api"
-  script:
-    - PROJECTS=$(
-        .gitlab-ci/scripts/get_manifest_projects.py
-        --manifest=default.xml
-        --remote=${CI_PARAM_SECO_REMOTE}
-        --concat-namespaces
-      )
-    # Add the gitlab-ci project
-    - PROJECTS="$PROJECTS ${CI_PROJECT_ROOT_NAMESPACE}/yocto/infrastructure/gitlab-ci"
-    # TODO retrigger gitlab-ci integration also
-    # Retrigger also project in SRCREV
-    - echo -e "Projects:\n${PROJECTS}"
-    - for PROJECT in ${PROJECTS}; do
-        .gitlab-ci/scripts/retrigger_mr_pipeline_jobs.py
-          --gitlab-url=${CI_SERVER_URL}
-          --token=${GITBOT_TOKEN}
-          --project=${PROJECT}
-          --state=opened
-          --target-branch=${MASTER_BRANCH_MANIFEST}
-          --job=check
-        ;
-      done
+    # Run the full build pipeline on non-master branches (i.e. in the merge request)
+    # or if explicitly triggered by the API or the web button.
+    - if: $CI_COMMIT_REF_NAME != $MASTER_BRANCH
+    - if: $CI_PIPELINE_SOURCE == "api"
+    - if: $CI_PIPELINE_SOURCE == "pipeline"
+    - if: $CI_PIPELINE_SOURCE == "web"
+  stage: manifest-pipeline
+
+.short_master_pipeline:
+  rules:
+    # The short master pipeline does not execute a full build, but only mirrors the
+    # build result from the merge request. Run it on the master branch by default if
+    # not explicitly triggered.
+    - if: $CI_PIPELINE_SOURCE == "api"
+      when: never
+    - if: $CI_PIPELINE_SOURCE == "pipeline"
+      when: never
+    - if: $CI_PIPELINE_SOURCE == "web"
+      when: never
+    - if: $CI_COMMIT_REF_NAME == $MASTER_BRANCH
 
 # --------------------------------------------------------------------------------------
-# Stage: infrastructure
+# Full build pipeline (runs in merge requests, and on master if manually triggered)
 # --------------------------------------------------------------------------------------
 generate-build-jobs:
-  extends: .infrastructure
-  rules:
-    - if: $CI_COMMIT_REF_NAME != $MASTER_BRANCH_MANIFEST || $CI_PIPELINE_SOURCE == "api"
+  extends:
+    - .infrastructure
+    - .full_build_pipeline
   script:
     # The job generation script implicitly passes the OS environment to the template, so
     # that the template has access to all GitLab CI variables. Hence there is no need
@@ -77,11 +71,10 @@ generate-build-jobs:
     paths:
       - build-jobs.yml
 
-trigger-build-jobs:
-  stage: infrastructure
+build-jobs:
+  extends:
+    - .full_build_pipeline
   needs: ["generate-build-jobs"]
-  rules:
-    - if: $CI_COMMIT_REF_NAME != $MASTER_BRANCH_MANIFEST || $CI_PIPELINE_SOURCE == "api"
   trigger:
     include:
       - artifact: build-jobs.yml
@@ -89,20 +82,35 @@ trigger-build-jobs:
     strategy: depend
 
 yamllint:
-  extends: .yamllint
+  extends:
+    - .yamllint
+    - .full_build_pipeline
 
 # --------------------------------------------------------------------------------------
-# Stage: build
+# Short master pipeline (runs on master after merging a merge request)
 # --------------------------------------------------------------------------------------
+retrigger:
+  extends:
+    - .infrastructure
+    - .short_master_pipeline
+  stage: retrigger
+  script:
+    - .gitlab-ci/scripts/retrigger_integrating_projects.py
+        --gitlab-url=${CI_SERVER_URL}
+        --token=${GITBOT_TOKEN}
+        --manifest-project=${CI_PROJECT_PATH}
+        --manifest-branch=${MASTER_BRANCH}
+        --group=${RETRIGGER_GROUP}
+
 build:merge_request:
-  extends: .infrastructure
+  extends:
+    - .infrastructure
+    - .short_master_pipeline
   stage: build
   # For some reason we cannot use a variable for the timeout; the CI linter then
   # reports 'jobs:build:merge_request:timeout config should be a duration'.
   # Hence we use GitLab's 'reference' syntax instead.
   timeout: !reference [variables, BUILD_TIMEOUT]
-  rules:
-    - if: $CI_COMMIT_REF_NAME == $MASTER_BRANCH_MANIFEST && $CI_PIPELINE_SOURCE != "api"
   script:
     - cd ${CI_PROJECT_DIR}
     # Get pipeline for merge request
@@ -111,7 +119,7 @@ build:merge_request:
         --token=${GITBOT_TOKEN}
         --project=${CI_PROJECT_PATH}
         --commit=${CI_COMMIT_SHA}
-        --ref=^${MASTER_BRANCH_MANIFEST} || true | head -1)
+        --ref=^${MASTER_BRANCH} || true | head -1)
     # If pipeline exists, mirror its result
     - if [ ! -z "${MR_PIPELINE}" ]; then
         .gitlab-ci/scripts/mirror_pipeline_result.py
@@ -125,5 +133,5 @@ build:merge_request:
           --gitlab-url=${CI_SERVER_URL}
           --token=${GITBOT_TOKEN}
           --project=${CI_PROJECT_PATH}
-          --ref=${MASTER_BRANCH_MANIFEST}
+          --ref=${MASTER_BRANCH}
     - fi
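
The two rule templates split every manifest pipeline into a full build and a
short mirror-only run. Reduced to plain Python, the decision is roughly (a
rough sketch of the rule templates, nothing more):

    def pipeline_kind(ref: str, master_branch: str, source: str) -> str:
        if source in ("api", "pipeline", "web"):
            return "full"   # explicit triggers always run the full build
        if ref != master_branch:
            return "full"   # non-master branches run the full build
        return "short"      # plain master pushes only mirror the MR result
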
diff --git a/scripts/alphaplan_fwr.py b/scripts/alphaplan_fwr.py
index 4e6db22..7b27dca 100755
--- a/scripts/alphaplan_fwr.py
+++ b/scripts/alphaplan_fwr.py
@@ -284,6 +284,11 @@ def main():
         help="""Base directory for locally deployed artifacts, should contain absolut path.""",
         dest="outputdir_local",
     )
+    parser.add_argument(
+        "--release-suffix",
+        help="""Suffix to append to the release folder""",
+        dest="release_suffix",
+    )
     args, _ = parser.parse_known_args()
 
     if args.outputdir_upload is None or args.outputdir_local is None:
@@ -302,13 +307,18 @@ def main():
     artifacts_all = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
     artifacts_all.append("BUILD_SRCREVS.log")
 
+    # Set release name
     if version.startswith("fngsystem"):
-        release_name_local = version.replace("fngsystem", "FNGSystem")
+        release_name = version.replace("fngsystem", "FNGSystem")
     else:
-        release_name_local = "Yocto-%s" % version
+        release_name = "Yocto-%s" % version
+
+    # Append release suffix
+    if args.release_suffix is not None:
+        release_name = release_name + args.release_suffix
 
-    output_dir = os.path.join(args.outputdir_upload, release_name_local)
-    outlocal_dir = os.path.join(args.outputdir_local, release_name_local)
+    output_dir = os.path.join(args.outputdir_upload, release_name)
+    outlocal_dir = os.path.join(args.outputdir_local, release_name)
     if not os.path.isdir(outlocal_dir):
         sys.exit("ERROR: ouputdir-local does not exist")
     if not os.path.isdir(output_dir):
@@ -329,7 +339,7 @@ def main():
         output_dir,
         outlocal_dir,
         machine,
-        release_name_local,
+        release_name,
         artifacts_all,
         md5sums,
     )
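
With the new --release-suffix argument, the release folder name is derived from
the version plus an optional suffix. A condensed restatement of the logic above
(version strings are illustrative):

    def release_name(version, suffix=None):
        if version.startswith("fngsystem"):
            name = version.replace("fngsystem", "FNGSystem")
        else:
            name = "Yocto-%s" % version
        return name + suffix if suffix is not None else name

    # release_name("fngsystem-1.2", "-rc1") -> "FNGSystem-1.2-rc1"
    # release_name("5.0")                   -> "Yocto-5.0"
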
diff --git a/scripts/changelog_generator.py b/scripts/changelog_generator.py
index 901cd8a..43c94f8 100755
--- a/scripts/changelog_generator.py
+++ b/scripts/changelog_generator.py
@@ -28,7 +28,7 @@ GITLAB_SERVER = "https://git.seco.com"
 GITLAB_GROUP_ID = "556"
 
 DISTRO_PROJECT_ID = "1748"
-MACHINE_PROJECT_ID = "1747"
+MACHINE_PROJECT_ID = "2074"
 MANIFEST_PROJECT_ID = "1725"
 
 DEFAULTBRANCH = "dunfell"
diff --git a/scripts/check_if_integration_branch_is_up_to_date.py b/scripts/check_if_integration_branch_is_up_to_date.py
index 42c2605..ce1ac44 100755
--- a/scripts/check_if_integration_branch_is_up_to_date.py
+++ b/scripts/check_if_integration_branch_is_up_to_date.py
@@ -5,55 +5,42 @@ import argparse
 import sys
 import logging
 from gitlab import Gitlab, GitlabGetError
+from gitlab.v4.objects import Project
 
 
 def check_if_integration_branch_is_up_to_date(
-    manifest_project,
-    integration_base,
-    project,
-    merge_request,
+    target_project: Project,
+    target_branch_name: str,
+    integration_branch_name: str,
 ):
-
-    integration_branch = None
-    branch_list = []
-    if common.is_gitlab_ci_integration_branch(merge_request.source_branch):
-        try:
-            integration_branch = manifest_project.branches.get(
-                merge_request.source_branch,
-                retry_transient_errors=True,
-            )
-            branch_list.append(merge_request.source_branch)
-        except GitlabGetError:
-            # Branch not found
-            pass
-    if integration_branch is None:
-        integration_branch_name = common.integration_branch_name(
-            project.name, merge_request.source_branch
+    try:
+        integration_branch = target_project.branches.get(
+            integration_branch_name, retry_transient_errors=True
         )
-        branch_list.append(integration_branch)
-        try:
-            integration_branch = manifest_project.branches.get(
-                integration_branch_name,
-                retry_transient_errors=True,
-            )
-        except GitlabGetError:
-            sys.exit(
-                "ERROR: could not find integration branch in {},"
-                "branch names checked: {}\n".format(manifest_project.name, branch_list)
+    except GitlabGetError:
+        sys.exit(
+            "ERROR: could not find integration branch {} in {}.".format(
+                integration_branch_name, target_project.name
             )
+        )
 
     try:
-        integration_base_branch = manifest_project.branches.get(
-            integration_base, retry_transient_errors=True
+        target_branch = target_project.branches.get(
+            target_branch_name, retry_transient_errors=True
         )
     except GitlabGetError:
-        sys.exit("ERROR: could not find integration base branch\n")
-
-    integration_base_id = integration_base_branch.commit["id"]
+        sys.exit(
+            "ERROR: could not find target branch {} in {}.".format(
+                target_branch_name, target_project.name
+            )
+        )
 
     # Loop over the commits until the integration_branch head id is found
     return common.is_commit_parent_of_project_commit(
-        manifest_project, integration_branch.commit["id"], integration_base_id, limit=10
+        target_project,
+        integration_branch.commit["id"],
+        target_branch.commit["id"],
+        limit=10,
     )
 
 
@@ -72,34 +59,29 @@ def main():
         required=True,
     )
     parser.add_argument(
-        "--manifest-project",
-        help="""name of the manifest project""",
-        dest="manifest_project",
+        "--target-project",
+        help="""name of the target project""",
+        dest="target_project",
         required=True,
     )
     parser.add_argument(
-        "--integration-base",
-        help="""manifest branch to branch off from""",
-        dest="integration_base",
+        "--target-branch",
+        help="""target branch to integrate into""",
+        dest="target_branch",
         required=True,
     )
     parser.add_argument(
-        "--project",
-        help="""name of the project, as specified in the manifest""",
-        dest="project",
+        "--source-project",
+        help="""name of the source project""",
+        dest="source_project",
         required=True,
     )
     parser.add_argument(
         "--merge-request",
-        help="""project merge request IID containing the changes to be integrated""",
+        help="""source project merge request IID containing the changes to be integrated""",
         dest="merge_request",
         required=True,
     )
-    parser.add_argument(
-        "--parent-merge-request",
-        help="""parent merge requests link, only used for a hint when the check failes""",
-        dest="parent_merge_request",
-    )
     parser.add_argument(
         "-v",
         "--verbose",
@@ -114,29 +96,39 @@ def main():
     gitlab = Gitlab(args.gitlab_url, private_token=args.token)
 
     logging.debug(args)
-    manifest_project = common.get_project(gitlab, args.manifest_project)
-    project = common.get_project(gitlab, args.project)
-    merge_request = common.get_merge_request(project, args.merge_request)
+    target_project = common.get_project(gitlab, args.target_project)
+    source_project = common.get_project(gitlab, args.source_project)
+    merge_request = common.get_merge_request(source_project, args.merge_request)
     if merge_request is None:
-        sys.exit("ERROR: could not get %s  %s" % (project.name, args.merge_request))
+        sys.exit(
+            "ERROR: could not get %s  %s" % (source_project.name, args.merge_request)
+        )
+
+    integration_branch_name = common.integration_branch_name(
+        source_project.name, merge_request.source_branch, args.target_branch
+    )
 
     if check_if_integration_branch_is_up_to_date(
-        manifest_project=manifest_project,
-        integration_base=args.integration_base,
-        project=project,
-        merge_request=merge_request,
+        target_project=target_project,
+        target_branch_name=args.target_branch,
+        integration_branch_name=integration_branch_name,
     ):
-        print("Integration branch is up to date.")
+        print(
+            "Integration branch {} in {} is up to date.".format(
+                integration_branch_name, target_project.name
+            )
+        )
     else:
-        mr_url = merge_request.web_url + "/pipelines"
-        if args.parent_merge_request is not None:
-            mr_url = args.parent_merge_request + "/pipelines"
-
         sys.exit(
-            "Integration branch is not up to date. Please re-run the MR pipeline:\n"
+            "Integration branch {} in {} is not up to date.\n"
+            "Please re-run the MR pipeline:\n"
             "  1. Open the MR pipelines page:\n"
-            "     %s\n"
-            "  2. Click 'Run Pipeline'" % (mr_url)
+            "     {}\n"
+            "  2. Click 'Run Pipeline'".format(
+                integration_branch_name,
+                target_project.name,
+                merge_request.web_url + "/pipelines",
+            )
         )
 
 
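
The reworked check reduces to an ancestry test: the target branch head must
appear in the recent history of the integration branch. A hedged sketch using
python-gitlab; is_commit_parent_of_project_commit() is not part of this patch,
so this only illustrates the assumed semantics of the limit=10 call:

    def is_up_to_date(target_project, target_branch_name, integration_branch_name):
        target_head = target_project.branches.get(target_branch_name).commit["id"]
        integration_head = target_project.branches.get(
            integration_branch_name
        ).commit["id"]
        # Walk a few commits of integration branch history (cf. limit=10)
        for commit in target_project.commits.list(
            ref_name=integration_head, per_page=10, page=1
        ):
            if commit.id == target_head:
                return True
        return False
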
diff --git a/scripts/colors.py b/scripts/colors.py
new file mode 100755
index 0000000..f218ab0
--- /dev/null
+++ b/scripts/colors.py
@@ -0,0 +1,47 @@
+#!/usr/bin/env python3
+# Taken from here: https://stackoverflow.com/a/26445590/3018229
+class colors:
+    """Colors class:
+    Reset all colors with colors.reset
+    Two subclasses fg for foreground and bg for background.
+    Use as colors.subclass.colorname.
+    i.e. colors.fg.red or colors.bg.green
+    Also, the generic bold, disable, underline, reverse, strikethrough,
+    and invisible work with the main class
+    i.e. colors.bold
+    """
+
+    reset = "\033[0m"
+    bold = "\033[01m"
+    dim = "\033[02m"
+    underline = "\033[04m"
+    reverse = "\033[07m"
+    strikethrough = "\033[09m"
+    invisible = "\033[08m"
+
+    class fg:
+        black = "\033[30m"
+        red = "\033[31m"
+        green = "\033[32m"
+        orange = "\033[33m"
+        blue = "\033[34m"
+        purple = "\033[35m"
+        cyan = "\033[36m"
+        lightgrey = "\033[37m"
+        darkgrey = "\033[90m"
+        lightred = "\033[91m"
+        lightgreen = "\033[92m"
+        yellow = "\033[93m"
+        lightblue = "\033[94m"
+        pink = "\033[95m"
+        lightcyan = "\033[96m"
+
+    class bg:
+        black = "\033[40m"
+        red = "\033[41m"
+        green = "\033[42m"
+        orange = "\033[43m"
+        blue = "\033[44m"
+        purple = "\033[45m"
+        cyan = "\033[46m"
+        lightgrey = "\033[47m"
diff --git a/scripts/common.py b/scripts/common.py
index 6e45da0..e6e4f65 100755
--- a/scripts/common.py
+++ b/scripts/common.py
@@ -4,6 +4,7 @@ import logging
 import requests
 import sys
 import time
+from colors import colors
 from furl import furl
 from git import Actor, GitCommandError
 from git.repo.base import Repo
@@ -17,9 +18,16 @@ srcrev_file = "SRCREV.conf"
 pending_states = ["created", "waiting_for_resource", "preparing", "pending", "running"]
 
 
-def integration_branch_name(project_name, branch_name):
+def integration_branch_name(project_name, source_branch_name, target_branch_name):
     """Get integration branch name"""
-    return "integrate/" + project_name.lower() + "/" + branch_name
+    return (
+        "integrate/"
+        + project_name.lower()
+        + "/"
+        + source_branch_name
+        + "/into/"
+        + target_branch_name
+    )
 
 
 def is_gitlab_ci_integration_branch(branch_name):
@@ -151,27 +159,39 @@ def list_commits(commits):
 
 
 def commit_and_push(
-    project: Project, repo: Repo, branch, message, name, email, less_verbose=False
+    project: Project, repo: Repo, message, name, email, less_verbose=False
 ):
     """Commit and push to a repo branch"""
+    branch = repo.head.reference
     author = Actor(name, email)
+
+    logging.debug("Committing changes:")
+    logging.debug(repo.git.diff("--staged"))
+
     repo.index.commit(message, author=author, committer=author)
-    print(repo.git.log("--oneline", "-n", "5"))
+
+    logging.debug("Git log:")
+    logging.debug(repo.git.log("--oneline", "-n", "5"))
 
     # Push commit
     try:
         origin = repo.remote("origin")
         logging.debug("Push branch %s to %s", branch, origin)
-        origin.push(branch, force=True)
+        origin.push(branch, force=True).raise_if_error()
     except GitCommandError as e:
-        sys.exit("ERROR: could not commit changes\n" + str(e))
+        sys.exit("ERROR: could not push branch %s to %s\n" % (branch, origin) + str(e))
 
     # Print commit information
     revision = repo.head.commit.hexsha
     print("Pushed new commit:")
     print(project.web_url + "/-/commit/" + revision)
     if not less_verbose:
-        print(repo.git.show("--summary", "--decorate"))
+        print(
+            colors.fg.lightgrey
+            + repo.git.show("--summary", "--decorate")
+            + colors.reset
+            + "\n"
+        )
 
     return revision
 
@@ -231,7 +251,7 @@ def clone_project(project: Project, into, branch=None):
         repo = Repo.clone_from(clone_url.url, into, branch=branch, depth=1)
     except GitCommandError as e:
         raise Exception("could not clone repository\n" + str(e)) from e
-    except IndexError:
+    except IndexError as e:
         raise Exception("branch '%s' not found" % branch) from e
     return repo
 
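
The extended integration_branch_name() encodes the target branch as well,
matching the integrate/<project>/<source>/into/<target> pattern used by the
trigger jobs, e.g.:

    integration_branch_name("gitlab-ci", "feature/x", "dunfell")
    # -> "integrate/gitlab-ci/feature/x/into/dunfell"
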
diff --git a/scripts/convert_md2html.py b/scripts/convert_md2html.py
index 3682eeb..6333789 100755
--- a/scripts/convert_md2html.py
+++ b/scripts/convert_md2html.py
@@ -198,7 +198,7 @@ def main():
     )
     parser.add_argument(
         "outfile",
-        help="""Markdown file to write.""",
+        help="""Html file to write.""",
     )
     args = parser.parse_args()
     convertmd2html(args.infile, args.outfile)
diff --git a/scripts/deploy_gitlab_ci.py b/scripts/deploy_gitlab_ci.py
index 88839d3..daa1c7c 100755
--- a/scripts/deploy_gitlab_ci.py
+++ b/scripts/deploy_gitlab_ci.py
@@ -6,14 +6,13 @@ import logging
 import sys
 import os
 from gitlab import Gitlab
+from gitlab.v4.objects import Project, MergeRequest
 
 from accept_merge_request import accept_merge_request
 from create_merge_request import create_merge_request
+from get_integration_sources import get_integration_sources
 from get_merge_requests import get_merge_requests
-from update_submodule import (
-    update_submodule_and_include_ref,
-    get_submodule_project_path_and_revision,
-)
+from update_submodule import update_submodule_and_include_ref
 from integrate_into_manifest import update_manifest, update_srcrev
 
 from ruamel.yaml import YAML
@@ -26,17 +25,12 @@ def read_keys_from_gitlab_ci_yml(gitlab_ci_yml):
     data = yaml.load(gitlab_ci_yml)
     logging.debug("Yaml: %s", data)
 
-    try:
-        masterbranch = data["variables"]["MASTER_BRANCH_PROJECT"]
-        logging.debug("Masterbranch %s", masterbranch)
-    except KeyError:
-        masterbranch = None
     try:
         recipe = data["variables"]["BB_RECIPE_NAME"]
         logging.debug("Recipe %s", recipe)
     except KeyError:
         recipe = None
-    return {"recipe": recipe, "masterbranch": masterbranch}
+    return {"recipe": recipe}
 
 
 def integrate_submodule_into(
@@ -53,6 +47,7 @@ def integrate_submodule_into(
 
     (
         project_repo,
+        project_dir,
         integration_branch_name,
         integration_commit,
         message,
@@ -71,7 +66,9 @@ def integrate_submodule_into(
     ret = {
         "project": gitlab_project,
         "repo": project_repo,
-        "branch": integration_branch_name,
+        "dir": project_dir,
+        "integration_branch": integration_branch_name,
+        "master_branch": branch,
         "commit": integration_commit,
         "message": message,
     }
@@ -83,12 +80,15 @@ def integrate_submodule_into(
     return ret
 
 
-def create_integration_merge_request(project, integration_branch_name, source_mr=None):
+def create_integration_merge_request(
+    project: Project,
+    integration_branch: str,
+    target_branch: str,
+    source_mr: MergeRequest = None,
+) -> MergeRequest:
     # Create merge request
     # This should be optional
-    mr, created = create_merge_request(
-        project, integration_branch_name, project.default_branch
-    )
+    mr, created = create_merge_request(project, integration_branch, target_branch)
     if created:
         if source_mr is not None:
             common.crosslink_merge_requests(source_mr, mr)
@@ -113,10 +113,16 @@ def main():
         required=True,
     )
     parser.add_argument(
-        "--project",
         "--manifest-project",
-        help="""name of the GitLab project""",
-        dest="project",
+        help="""name of the manifest project""",
+        dest="manifest_project",
+        required=True,
+    )
+    parser.add_argument(
+        "--manifest-branch",
+        help="""manifest branch to integrate changes into (can be a comma-separated list)""",
+        dest="manifest_branch",
+        required=True,
     )
     parser.add_argument(
         "--submodule",
@@ -130,13 +136,6 @@ def main():
         dest="revision",
         required=True,
     )
-    parser.add_argument(
-        "--branch",
-        help="""project branch (if not default branch)""",
-        dest="branch",
-        required=False,
-        default=None,
-    )
     parser.add_argument(
         "--merge",
         help="""if set, perform merge after integration""",
@@ -145,6 +144,20 @@ def main():
         required=False,
         default=False,
     )
+    parser.add_argument(
+        "--project",
+        help="""gitlab-ci project path or id""",
+        dest="project",
+        default=os.environ.get("CI_PROJECT_PATH"),
+        required=False,
+    )
+    parser.add_argument(
+        "--branch",
+        help="""gitlab-ci branch that we're merging into""",
+        dest="branch",
+        default="master",
+        required=False,
+    )
     parser.add_argument(
         "--manifest-file",
         help="""manifest file name (default: 'default.xml')""",
@@ -160,10 +173,10 @@ def main():
         required=False,
     )
     parser.add_argument(
-        "projects",
-        help="""List of projects the change should be deployed to additionally
-                to the manifest project given as named parameter.""",
-        nargs="*",
+        "--group",
+        help="""group path or id to limit search scope to""",
+        dest="group",
+        required=True,
     )
     parser.add_argument(
         "-v",
@@ -180,53 +193,71 @@ def main():
             datefmt="%H:%M:%S",
         )
 
+    manifest_branches = args.manifest_branch.split(",")
+
     gitlab = Gitlab(args.gitlab_url, private_token=args.token)
+    group = gitlab.groups.get(args.group)
 
     # =======================================================
     # Create integration branches and commits with updates
     # submodule in all projects
     # =======================================================
-    project_integration = {}
-    # Update submodule in all 'child' project
-    for p in args.projects:
-        print("Create integration commit in", p)
+    integration_sources = {}
+    all_integration_sources = []
+    for manifest_branch in manifest_branches:
+        print(
+            "Searching for projects in %s that are configured for automatic integration into %s:%s"
+            % (args.group, args.manifest_project, manifest_branch)
+        )
+        integration_sources[manifest_branch] = get_integration_sources(
+            args.manifest_project, manifest_branch, group
+        )
+        for s in integration_sources[manifest_branch]:
+            if s not in all_integration_sources:
+                all_integration_sources.append(s)
+
+    # Update submodule in all integration sources
+    project_integrations = []
+    for s in all_integration_sources:
+        print("Create integration commit in %s:%s" % (s["project"], s["branch"]))
 
-        res = integrate_submodule_into(
-            gitlab, p, args.submodule, args.revision, args.branch
+        integration = integrate_submodule_into(
+            gitlab, s["project"], args.submodule, args.revision, s["branch"]
         )
         # Store in the list if commit is set (meaning there was an update or
         #   an existing integration branch)
-        if res["commit"] is not None:
-            project_integration[p] = res
-
-    print("Create integration commit in", args.project)
-    # Update submodule in manifest project
-    manifest_project = integrate_submodule_into(
-        gitlab,
-        args.project,
-        args.submodule,
-        args.revision,
-        args.branch,
-        commit_and_push=False,
-        force_clone=True,
-    )
+        if integration["commit"] is not None:
+            project_integrations.append(integration)
+
+    # Update submodule in all manifest branches
+    manifest_integrations = []
+    for manifest_branch in manifest_branches:
+        print(
+            "Create integration commit in %s:%s"
+            % (args.manifest_project, manifest_branch),
+        )
+        manifest_integrations.append(
+            integrate_submodule_into(
+                gitlab,
+                args.manifest_project,
+                args.submodule,
+                args.revision,
+                manifest_branch,
+                commit_and_push=False,
+                force_clone=True,
+            )
+        )
 
-    branch = args.branch
-    if branch is None:
-        branch = manifest_project["project"].default_branch
     # =======================================================
     # Create and merge merge_requests if needed
     # =======================================================
     if args.merge:
-        # Get source merge request ( the one in the gitlab-ci repo)
-        submodule_project_path, _ = get_submodule_project_path_and_revision(
-            manifest_project["project"], args.submodule, branch
-        )
-        submodule_project = common.get_project(gitlab, submodule_project_path)
+        # Get source merge request (the one in the gitlab-ci repo)
+        gitlab_ci_project = common.get_project(gitlab, args.project)
         mrs = get_merge_requests(
-            submodule_project,
-            # TODO should this be submodule_project's default branch?
-            target_branch="master",
+            project=gitlab_ci_project,
+            target_branch=args.branch,
+            state="merged",
             commit=args.revision,
         )
         if not mrs:
@@ -236,128 +267,140 @@ def main():
             )
         source_mr = mrs[0]
 
-        for p in project_integration:
-            integration = project_integration[p]
-            logging.debug("Create MR in %s", integration["project"].name)
+        for project_integration in project_integrations:
+            logging.debug("Create MR in %s", project_integration["project"].name)
             mr = create_integration_merge_request(
-                integration["project"], integration["branch"], source_mr
+                project_integration["project"],
+                project_integration["integration_branch"],
+                project_integration["master_branch"],
+                source_mr,
             )
-            integration["mr"] = mr
             # Now merge
-            logging.debug("Merge %s!%s", p, mr.iid)
+            logging.debug("Merge %s!%s", project_integration["project"], mr.iid)
 
             # Wait until GitLab has checked merge status
-            common.wait_until_merge_status_is_set(integration["project"], mr)
+            common.wait_until_merge_status_is_set(project_integration["project"], mr)
 
             # Attempt to merge
             merged, integration_commit = accept_merge_request(
-                integration["project"], mr, rebase=True
+                project_integration["project"], mr, rebase=True
             )
             # if this has rebased the integration commit needs to be adapted:
-            project_integration[p]["commit"] = integration_commit
+            project_integration["commit"] = integration_commit
             # Save the target branch here, as the source branch gets deleted
             # during merge
-            project_integration[p]["branch"] = mr.target_branch
+            project_integration["integration_branch"] = mr.target_branch
 
             if not merged:
                 sys.exit(
-                    "Integration MR could not be merged. You have two possibilities to fix "
-                    "this:\n"
-                    "  1. Checkout the MR and rebase it on the current master manually, or\n"
-                    "  2. Delete the MR (Edit -> Delete in the MR UI)\n"
-                    "In either case restart this job afterwards in order to get it merged."
+                    "Integration MR could not be merged:\n"
+                    "%s\n"
+                    "This can probably be resolved by creating a new commit in "
+                    "gitlab-ci and merging it. The above MR can be closed then."
+                    % mr.web_url
                 )
 
-        print("Successfully merged")
-
     # =======================================================
     # Now create the integration commit in the manifest
     # for all subprojects at once
     # =======================================================
-    manifest_file_abs = os.path.join(
-        manifest_project["repo"].working_tree_dir, args.manifest_file
-    )
-    logging.debug("Read manifest from: %s", manifest_file_abs)
-    with open(manifest_file_abs, "r", encoding="utf8") as fp:
-        manifest = fp.read()
-    logging.debug(manifest)
-    srcrev_file_abs = os.path.join(
-        manifest_project["repo"].working_tree_dir, args.srcrev_file
-    )
-    logging.debug("Read manifest from: %s", srcrev_file_abs)
-    with open(srcrev_file_abs, "r", encoding="utf8") as fp:
-        srcrev = fp.read()
-    logging.debug(srcrev)
-
-    for p in project_integration:
-        integration = project_integration[p]
-        logging.debug(
-            "Update %s to %s", integration["project"].name, integration["commit"]
+    for manifest_integration in manifest_integrations:
+        manifest_file_abs = os.path.join(
+            manifest_integration["repo"].working_tree_dir, args.manifest_file
         )
-
-        new_manifest = update_manifest(
-            manifest, integration["project"], integration["commit"]
+        logging.debug("Read manifest from: %s", manifest_file_abs)
+        with open(manifest_file_abs, "r", encoding="utf8") as fp:
+            manifest = fp.read()
+        logging.debug(manifest)
+        srcrev_file_abs = os.path.join(
+            manifest_integration["repo"].working_tree_dir, args.srcrev_file
         )
-        if new_manifest is not None:
-            manifest = new_manifest
-            logging.debug(manifest)
-            continue
-
-        # get BB_RECIPE_NAME from the projects .gitlab-ci.yml
-        # Use direct read from gitlab as we have not checked out
-        # the repo if the branch is already up to date
-
-        gitlab_ci_yml = common.get_repository_file_raw(
-            integration["project"], ".gitlab-ci.yml", ref=integration["branch"]
+        logging.debug("Read manifest from: %s", srcrev_file_abs)
+        with open(srcrev_file_abs, "r", encoding="utf8") as fp:
+            srcrev = fp.read()
+        logging.debug(srcrev)
+
+        for project_integration in project_integrations:
+            # Check if project integration belongs to this manifest branch
+            for source in integration_sources[manifest_integration["master_branch"]]:
+                if (
+                    source["project"]
+                    == project_integration["project"].path_with_namespace
+                    and source["branch"] == project_integration["master_branch"]
+                ):
+                    logging.debug(
+                        "Update %s to %s",
+                        project_integration["project"].name,
+                        project_integration["commit"],
+                    )
+
+                    new_manifest = update_manifest(
+                        manifest,
+                        project_integration["project"],
+                        project_integration["commit"],
+                    )
+                    if new_manifest is not None:
+                        manifest = new_manifest
+                        logging.debug(manifest)
+                        continue
+
+                    # get BB_RECIPE_NAME from the projects .gitlab-ci.yml
+                    # Use direct read from gitlab as we have not checked out
+                    # the repo if the branch is already up to date
+
+                    gitlab_ci_yml = common.get_repository_file_raw(
+                        project_integration["project"],
+                        ".gitlab-ci.yml",
+                        ref=project_integration["integration_branch"],
+                    )
+                    project_keys = read_keys_from_gitlab_ci_yml(gitlab_ci_yml)
+
+                    new_srcrev = update_srcrev(
+                        srcrev, project_keys["recipe"], project_integration["commit"]
+                    )
+                    if new_srcrev is not None:
+                        srcrev = new_srcrev
+                        logging.debug(srcrev)
+                    else:
+                        logging.debug(
+                            "Project %s not found in xml or srcrev file",
+                            project_integration["project"],
+                        )
+
+        # Write manifest
+        with open(manifest_file_abs, "w", encoding="utf8") as fp:
+            fp.write(manifest)
+        manifest_integration["repo"].git.add(args.manifest_file)
+        logging.debug(manifest)
+        with open(srcrev_file_abs, "w", encoding="utf8") as fp:
+            fp.write(srcrev)
+        manifest_integration["repo"].git.add(args.srcrev_file)
+        logging.debug(srcrev)
+
+        # ========================================================
+        # Squash all commits on the integration branch to one
+        # ========================================================
+        manifest_integration["repo"].remotes.origin.fetch(
+            manifest_integration["master_branch"]
         )
-        project_keys = read_keys_from_gitlab_ci_yml(gitlab_ci_yml)
-
-        new_srcrev = update_srcrev(
-            srcrev, project_keys["recipe"], integration["commit"]
+        manifest_master = manifest_integration["project"].branches.get(
+            manifest_integration["master_branch"]
         )
-        if new_srcrev is not None:
-            srcrev = new_srcrev
-            logging.debug(srcrev)
-        else:
-            logging.debug("Project %s not found in xml or srcrev file", p)
-
-    # Write manifest
-    with open(manifest_file_abs, "w", encoding="utf8") as fp:
-        fp.write(manifest)
-    manifest_project["repo"].git.add(args.manifest_file)
-    logging.debug(manifest)
-    with open(srcrev_file_abs, "w", encoding="utf8") as fp:
-        fp.write(srcrev)
-    manifest_project["repo"].git.add(args.srcrev_file)
-    logging.debug(srcrev)
-
-    # ========================================================
-    # Squash all commits on the integration branch to one
-    # ========================================================
-    manifest_project["repo"].remotes.origin.fetch(branch)
-    manifest_master = manifest_project["project"].branches.get(branch)
-    manifest_project["repo"].git.reset("--soft", manifest_master.commit["id"])
-
-    # ========================================================
-    # Now commit and push the changes to the manifest repo
-    # ========================================================
-    # Make an API request to create the gitlab.user object
-    gitlab = manifest_project["project"].manager.gitlab
-    gitlab.auth()
-    integration_commit = common.commit_and_push(
-        manifest_project["project"],
-        manifest_project["repo"],
-        manifest_project["branch"],
-        manifest_project["message"],
-        gitlab.user.username,
-        gitlab.user.email,
-    )
-
-    print(
-        "Successfully create integration commit {} in {}".format(
-            integration_commit, args.project
+        manifest_integration["repo"].git.reset("--soft", manifest_master.commit["id"])
+
+        # ========================================================
+        # Now commit and push the changes to the manifest repo
+        # ========================================================
+        # Make an API request to create the gitlab.user object
+        gitlab = integration["project"].manager.gitlab
+        gitlab.auth()
+        integration_commit = common.commit_and_push(
+            manifest_integration["project"],
+            manifest_integration["repo"],
+            manifest_integration["message"],
+            gitlab.user.username,
+            gitlab.user.email,
         )
-    )
 
     if not args.merge:
         sys.exit(0)
@@ -365,34 +408,36 @@ def main():
     # ============================================
     # Create merge requests for the manifest
     # ============================================
-
-    logging.debug("Create MR in %s", manifest_project["project"].name)
-    manifest_project["mr"] = create_integration_merge_request(
-        manifest_project["project"], manifest_project["branch"], source_mr
-    )
-    # =================================================
-    # Now merge it
-    # =================================================
-    # The manifest needs to be merged at last
-    mr = manifest_project["mr"]
-    logging.debug("Merge %s!%s", args.project, mr.iid)
-
-    # Wait until GitLab has checked merge status
-    common.wait_until_merge_status_is_set(manifest_project["project"], mr)
-
-    # Attempt to merge
-    merged = accept_merge_request(manifest_project["project"], mr, rebase=True)
-
-    if not merged:
-        sys.exit(
-            "Integration MR could not be merged. You have two possibilities to fix "
-            "this:\n"
-            "  1. Checkout the MR and rebase it on the current master manually, or\n"
-            "  2. Delete the MR (Edit -> Delete in the MR UI)\n"
-            "In either case restart this job afterwards in order to get it merged."
+    for integration in manifest_integrations:
+        logging.debug("Create MR in %s", integration["project"].name)
+        mr = create_integration_merge_request(
+            integration["project"],
+            integration["integration_branch"],
+            integration["master_branch"],
+            source_mr,
         )
+        # =================================================
+        # Now merge it
+        # =================================================
+        # The manifest needs to be merged at last
+        logging.debug("Merge %s!%s", args.manifest_project, mr.iid)
 
-    print("Successfully merged")
+        # Wait until GitLab has checked merge status
+        common.wait_until_merge_status_is_set(integration["project"], mr)
+
+        # Attempt to merge
+        merged = accept_merge_request(integration["project"], mr, rebase=True)
+
+        if not merged:
+            sys.exit(
+                "Integration MR could not be merged:\n"
+                "%s\n"
+                "This can probably be resolved by creating a new commit in "
+                "gitlab-ci and merging it. The above MR can be closed then."
+                % mr.web_url
+            )
+
+        print("Successfully merged")
 
 
 if __name__ == "__main__":
diff --git a/scripts/download_file_from_latest_job.py b/scripts/download_file_from_latest_job.py
new file mode 100755
index 0000000..968586a
--- /dev/null
+++ b/scripts/download_file_from_latest_job.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+"""
+
+Downloads a given file from the artifacts of a pipeline job.
+
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+import gitlab as gl
+
+__author__ = "Jonas Höppner"
+__email__ = "jonas.hoeppner@garz-fricke.com"
+
+from download_job_artifacts import download_job_artifact
+from get_pipeline_jobs import get_pipeline_jobs
+
+GITLAB_SERVER = "https://git.seco.com"
+
+verbose = 0
+
+
+def main(args):
+    parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")
+
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        action="store",
+        default=GITLAB_SERVER,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        action="store",
+        dest="project",
+        help="Specify the project by either by id or by path.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        action="store",
+        dest="pipeline",
+        help="Specify the pipeline by id.",
+    )
+    parser.add_argument(
+        "-s",
+        "--stage",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given stage, if omnitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        action="store",
+        default=None,
+        help="Filter the jobs by given name, if omnitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "--path",
+        action="store",
+        required=True,
+        help="Path of the file inside the artifacts to download.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        dest="verbose",
+        default=0,
+        help="Increase verbosity.",
+    )
+
+    options = parser.parse_args(args)
+    if options.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+
+    logging.debug(options)
+    gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
+    jobs = get_pipeline_jobs(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+
+    def sort_by_finish_ts(j):
+        return j.finished_at
+
+    jobs.sort(key=sort_by_finish_ts)
+    # finished_at sorts in ascending order, so the most recently finished
+    # job is the last one
+    job = jobs[-1]
+
+    filename = download_job_artifact(
+        gitlab, dest=os.path.basename(options.path), path=options.path, job=job
+    )
+    print("Downloaded {} for job {} to {}".format(options.path, job.name, filename))
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
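+
+# Hedged CLI sketch (token, project, pipeline id and path are placeholders, not
+# values from this repo): download one file from the most recently finished
+# "deploy" job of a pipeline:
+#   ./download_file_from_latest_job.py --token <token> --project group/project \
+#       --pipeline 4711 --stage deploy --path deploy/metainfo.json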
diff --git a/scripts/download_job_artifacts.py b/scripts/download_job_artifacts.py
new file mode 100755
index 0000000..07717b1
--- /dev/null
+++ b/scripts/download_job_artifacts.py
@@ -0,0 +1,164 @@
+#!/usr/bin/env python3
+"""
+
+Downloads the job artifacts of a given job
+
+"""
+
+import argparse
+import logging
+import sys
+import os
+import zipfile
+import tempfile
+import gitlab as gl
+
+__author__ = "Jonas Höppner"
+__email__ = "jonas.hoeppner@garz-fricke.com"
+
+GITLAB_SERVER = "https://git.seco.com"
+
+verbose = 0
+
+
+def download_job_artifacts(gitlab, dest, job, project=None, extract=False):
+    """Downloads the artifacts and stores them ar dest/job_id
+    If extract is set, the downloaded zipfile is extracted and removed.
+    Returns either the dest path or the zipfile name.
+    """
+    # Accept either a gitlab job object, or the project and the job id
+    if isinstance(job, gl.v4.objects.jobs.ProjectJob):
+        gl_job = job
+    else:
+        # Accept either a gitlab object, or a name or id to specify a project
+        if isinstance(project, gl.v4.objects.projects.Project):
+            gl_project = project
+        else:
+            gl_project = gitlab.projects.get(project)
+        gl_job = gl_project.jobs.get(job)
+
+    dest = os.path.join(dest, str(gl_job.id))
+    os.makedirs(dest)
+    file_name = os.path.join(dest, "artifacts.zip")
+    with open(file_name, "wb") as f:
+        gl_job.artifacts(streamed=True, action=f.write)
+    if not extract:
+        return file_name
+
+    with zipfile.ZipFile(file_name) as archive:
+        archive.extractall(dest)
+    os.remove(file_name)
+    return dest
+
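+# Hedged usage sketch (token and ids are placeholders): fetch and unpack the
+# artifacts of job 12345 below /tmp/artifacts/12345:
+#   gitlab = gl.Gitlab(GITLAB_SERVER, private_token="<token>")
+#   path = download_job_artifacts(gitlab, "/tmp/artifacts", 12345,
+#                                 project="group/project", extract=True)
+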
+
+def download_job_artifact(gitlab, dest, path, job, project=None):
+    """Downloads one file from artifacts and stores it at dest"""
+    # Accept either a gitlab job object, or the project and the job id
+    if isinstance(job, gl.v4.objects.jobs.ProjectJob):
+        gl_job = job
+    else:
+        # Accept either a gitlab object, or a name or id to specify a project
+        if isinstance(project, gl.v4.objects.projects.Project):
+            gl_project = project
+        else:
+            gl_project = gitlab.projects.get(project)
+        gl_job = gl_project.jobs.get(job)
+
+    with open(dest, "wb") as f:
+        gl_job.artifact(path=path, streamed=True, action=f.write)
+
+    return dest
+
+
+def main(args):
+    parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")
+
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        action="store",
+        default=GITLAB_SERVER,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        action="store",
+        dest="project",
+        help="Specify the project by either by id or by path.",
+        required=True,
+    )
+    parser.add_argument(
+        "--job",
+        action="store",
+        dest="job",
+        help="Specify the job by id.",
+        required=True,
+    )
+    parser.add_argument(
+        "--extract",
+        action="store_true",
+        dest="job",
+        default=False,
+        help="Specify if the artifacts should be extracted after download.",
+    )
+    parser.add_argument(
+        "--dest",
+        "--destination",
+        action="store",
+        dest="destination",
+        default=None,
+        help="Folder where the artifacts are stored in, a local tmpfolder is generated if omnitted.",
+    )
+    parser.add_argument(
+        "--path",
+        action="store",
+        default=None,
+        help="Path inside the artifacts, if set only one single file is downloaded instead of the complete artifacts.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        dest="verbose",
+        default=0,
+        help="Increase verbosity.",
+    )
+
+    options = parser.parse_args(args)
+    if options.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+
+    logging.debug(options)
+    gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
+
+    if options.path is None:
+        if options.destination is None:
+            # A directory is needed here (artifacts end up in <dest>/<job_id>)
+            destination = tempfile.mkdtemp()
+        else:
+            destination = options.destination
+
+        filename = download_job_artifacts(
+            gitlab, destination, options.job, options.project, extract=options.extract
+        )
+        print("Downloaded artifacts for job {} to {}".format(options.job, filename))
+    else:
+        if options.destination is None:
+            destination = tempfile.mkdtemp()
+        else:
+            destination = options.destination
+
+        filename = download_job_artifact(
+            gitlab, destination, options.path, options.job, options.project
+        )
+        print(
+            "Downloaded {} for job {} to {}".format(options.path, options.job, filename)
+        )
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/scripts/generate_release_metadata.py b/scripts/generate_release_metadata.py
index f1f6418..01752a7 100755
--- a/scripts/generate_release_metadata.py
+++ b/scripts/generate_release_metadata.py
@@ -7,39 +7,29 @@ from datetime import datetime
 def generate_metadata(
     machine,
     version,
-    artifacts_image,
+    image_artifacts,
     sdk,
-    output_dir,
-    outlocal_dir,
+    output_file,
 ):
-    """Generates a metainfo.json for the release"""
+    """Generate a metainfo.json for the release and write it to output_file."""
 
     install_script = None
     licenses = None
     image_general = None
     image_wic = None
 
-    # Join filepath for metadata
-
-    if output_dir is not None:
-        filepath = os.path.join(output_dir, machine, "metainfo.json")
-    elif outlocal_dir is not None:
-        filepath = os.path.join(outlocal_dir, machine, "metainfo.json")
-    else:
-        print("Error: Filepath is empty")
-        return -1
-
     # Collect metadata and write to metainfo.json
 
-    for artifact in artifacts_image:
-        if artifact == "fng-install.sh":
-            install_script = artifact
-        elif artifact == "license.manifest":
-            licenses = artifact
-        elif artifact.endswith(machine + ".tar.gz"):
-            image_general = artifact
-        elif artifact.endswith(machine + ".wic"):
-            image_wic = artifact
+    for artifact in image_artifacts:
+        filename = os.path.basename(artifact)
+        if filename == "fng-install.sh":
+            install_script = filename
+        elif filename == "license.manifest":
+            licenses = filename
+        elif filename.endswith(machine + ".tar.gz"):
+            image_general = filename
+        elif filename.endswith(machine + ".wic"):
+            image_wic = filename
 
     metadata = dict()
 
@@ -78,5 +68,5 @@ def generate_metadata(
         new_file["path"] = licenses
         metadata["files"].append(new_file)
 
-    with open(filepath, "w", encoding="utf-8") as file:
+    with open(output_file, "w", encoding="utf-8") as file:
         file.write(json.dumps(metadata))
diff --git a/scripts/get_integration_sources.py b/scripts/get_integration_sources.py
new file mode 100755
index 0000000..e78916c
--- /dev/null
+++ b/scripts/get_integration_sources.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+import argparse
+import re
+import sys
+from gitlab import Gitlab, GitlabGetError
+from gitlab.v4.objects import Group
+
+
+def get_integration_sources(manifest_project: str, manifest_branch: str, group: Group):
+    """
+    Get a list of projects and branches in the given group which are configured for
+    automatic integration into the given branch of the given manifest project.
+    """
+    integration_sources = []
+    gitlab = group.manager.gitlab
+
+    # Recurse into subgroups
+    for g in group.subgroups.list():
+        subgroup = gitlab.groups.get(g.id)
+        integration_sources += get_integration_sources(
+            manifest_project, manifest_branch, subgroup
+        )
+
+    # Regex to check the INTEGRATION variable against (escaped so that dots in
+    # project paths or branch names are matched literally)
+    regex = f":{re.escape(manifest_project)}:{re.escape(manifest_branch)}$"
+
+    for project in group.projects.list():
+        try:
+            project = gitlab.projects.get(project.id)
+            if not project.archived and project.jobs_enabled:
+                integrations = project.variables.get("INTEGRATION").value
+                for integration in integrations.splitlines():
+                    if re.search(regex, integration):
+                        source_branch = integration.split(":")[0]
+                        integration_sources.append(
+                            {
+                                "project": project.path_with_namespace,
+                                "branch": source_branch,
+                            }
+                        )
+        except GitlabGetError as e:
+            if e.response_code == 404:  # not found
+                pass
+            elif e.response_code == 403:  # forbidden
+                sys.exit(
+                    (
+                        "ERROR: could not get INTEGRATION variable of project %s\n"
+                        % project.path_with_namespace
+                    )
+                    + e.error_message
+                )
+            else:
+                raise
+
+    return integration_sources
+
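+# For illustration (values are hypothetical): each line of a project's
+# INTEGRATION variable is expected to look like
+#   <source_branch>:<manifest_project>:<manifest_branch>
+# e.g. "main:customers/acme/manifest:main"; the function matches the trailing
+# ":<manifest_project>:<manifest_branch>" part and reports the source branch.
+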
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        required=True,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--manifest-project",
+        help="""name of the manifest project""",
+        dest="manifest_project",
+        required=True,
+    )
+    parser.add_argument(
+        "--manifest-branch",
+        help="""manifest branch""",
+        dest="manifest_branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--group",
+        help="""group path or id to limit search scope to""",
+        dest="group",
+        required=True,
+    )
+
+    args, _ = parser.parse_known_args()
+
+    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
+    group = gitlab.groups.get(args.group)
+
+    integration_sources = get_integration_sources(
+        args.manifest_project,
+        args.manifest_branch,
+        group,
+    )
+
+    for source in integration_sources:
+        print("%s:%s" % (source["project"], source["branch"]))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/get_manifest_projects.py b/scripts/get_manifest_projects.py
deleted file mode 100755
index 03e5798..0000000
--- a/scripts/get_manifest_projects.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import sys
-from furl import furl
-from lxml import etree
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--manifest",
-        help="""manifest file to parse projects from""",
-        dest="manifest",
-        required=True,
-    )
-    parser.add_argument(
-        "--remote",
-        help="""get only projects with this remote""",
-        dest="remote",
-        required=False,
-    )
-    parser.add_argument(
-        "--concat-namespaces",
-        help="""parse namespace from fetch URL and prepend it to project names""",
-        dest="concat_namespaces",
-        action="store_true",
-        required=False,
-    )
-
-    args, _ = parser.parse_known_args()
-
-    # Parse manifest file
-    try:
-        manifest = etree.parse(args.manifest)
-    except FileNotFoundError:
-        sys.exit("ERROR: file '%s' not found" % args.manifest)
-
-    # Get namespace from remote
-    # This is needed for cases where the remote URL contains a part of the project
-    # namespace (e.g. "ssh://git@gitlab.com/garz-fricke/yocto") and the project name
-    # contains another part of it (e.g. "layers/meta-seconorth-machine"). There is no
-    # GitLab API call which will find a project given this information.
-    # Thus we are adding a possibility to parse the namespace from the remote and pre-
-    # pend it to the project name in order to return it including its complete namespace
-    # (e.g. "garz-fricke/yocto/layers/meta-seconorth-machine").
-    if args.concat_namespaces:
-        remote = manifest.find("remote[@name='%s']" % args.remote)
-        if remote is None:
-            sys.exit("ERROR: remote '%s' not found in manifest" % args.remote)
-        path = furl(remote.get("fetch")).path
-        prefix = str(path).strip("/") + "/"
-    else:
-        prefix = ""
-
-    # Find project references in manifest
-    if args.remote:
-        find_expression = "project[@remote='%s']" % args.remote
-    else:
-        find_expression = "project"
-    projects = manifest.findall(find_expression)
-
-    for project in projects:
-        print(prefix + project.get("name"))
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/get_pipeline_jobs.py b/scripts/get_pipeline_jobs.py
new file mode 100755
index 0000000..71f7f12
--- /dev/null
+++ b/scripts/get_pipeline_jobs.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+"""
+
+Queries the jobs of a given pipeline, filtered by stage
+
+"""
+
+import argparse
+import logging
+import sys
+import gitlab as gl
+
+__author__ = "Jonas Höppner"
+__email__ = "jonas.hoeppner@garz-fricke.com"
+
+GITLAB_SERVER = "https://git.seco.com"
+
+verbose = 0
+
+
+def get_pipeline_pipelinejobs(gitlab, project, pipeline, name=None, stage=None):
+    # Accept either a gitlab object, or a name or id to specify a project
+    if isinstance(project, gl.v4.objects.projects.Project):
+        gl_project = project
+    else:
+        gl_project = gitlab.projects.get(project)
+
+    gl_pipeline = gl_project.pipelines.get(pipeline)
+    jobs = gl_pipeline.jobs.list(all=True)
+    if stage is not None:
+        jobs = list(filter(lambda j: j.stage == stage, jobs))
+    if name is not None:
+        jobs = list(filter(lambda j: j.name == name, jobs))
+
+    return jobs
+
+
+def get_pipeline_jobs(gitlab, project, pipeline, name=None, stage=None):
+    # Accept either a gitlab object, or a name or id to specify a project
+    if isinstance(project, gl.v4.objects.projects.Project):
+        gl_project = project
+    else:
+        gl_project = gitlab.projects.get(project)
+
+    pipeline_jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage)
+    jobs = []
+    # Project Jobs from Pipeline Job
+    for job in pipeline_jobs:
+        jobs.append(gl_project.jobs.get(job.id))
+    return jobs
+
+
+def get_pipeline_job_ids(gitlab, project, pipeline, name=None, stage=None):
+    jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage)
+    return [j.id for j in jobs]
+
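+# Hedged usage sketch (ids are placeholders): fetch the full job objects of the
+# "build" stage of pipeline 4711:
+#   jobs = get_pipeline_jobs(gitlab, "group/project", 4711, stage="build")
+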
+
+def main(args):
+    parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")
+
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        action="store",
+        default=GITLAB_SERVER,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        action="store",
+        dest="project",
+        help="Specify the project by either by id or by path.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        action="store",
+        dest="pipeline",
+        help="Specify the pipeline by id.",
+    )
+    parser.add_argument(
+        "-s",
+        "--stage",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given stage, if omnitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        action="store",
+        default=None,
+        help="Filter the jobs by given name, if omnitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        dest="verbose",
+        default=0,
+        help="Increase verbosity.",
+    )
+
+    options = parser.parse_args(args)
+    if options.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+
+    logging.debug(options)
+    gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
+    jobs = get_pipeline_jobs(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+
+    for j in jobs:
+        print(j.name, j.id)
+
+    job_ids = get_pipeline_job_ids(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+    for j in job_ids:
+        print(j)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
diff --git a/scripts/integrate_into_manifest.py b/scripts/integrate_into_manifest.py
index 223c5ff..26444c5 100755
--- a/scripts/integrate_into_manifest.py
+++ b/scripts/integrate_into_manifest.py
@@ -71,12 +71,12 @@ def update_srcrev(srcrev, recipe_name, new_revision):
 
 
 def integrate_into_manifest(
-    manifest_project,
-    integration_base,
+    manifest_project: Project,
+    manifest_branch,
     manifest_file,
     srcrev_file,
     recipe_name,
-    project,
+    project: Project,
     merge_request,
 ):
     gitlab = manifest_project.manager.gitlab
@@ -92,14 +92,15 @@ def integrate_into_manifest(
 
         # Checkout manifest
         # TODO replace checkout with gitlab api access
+        print("Cloning manifest repo: %s" % manifest_project.http_url_to_repo)
         try:
             manifest_repo = Repo.clone_from(
-                clone_url.url, manifest_dir, branch=integration_base
+                clone_url.url, manifest_dir, branch=manifest_branch
             )
         except GitCommandError as e:
             sys.exit("ERROR: could not clone manifest repository\n" + str(e))
         except IndexError:
-            sys.exit("ERROR: branch '%s' not found" % integration_base)
+            sys.exit("ERROR: branch '%s' not found" % manifest_branch)
 
         # Special handling for the gitlab-ci integration
         # When the branch 'merge_request.source_branch' already starts with
@@ -115,17 +116,18 @@ def integrate_into_manifest(
             )
             logging.debug("Heads: %s", manifest_repo.heads)
             manifest_repo.heads[integration_branch].checkout()
-            print(manifest_repo.git.log("--oneline", "-n", "5"))
+            logging.debug(manifest_repo.git.log("--oneline", "-n", "5"))
+            print("Using existing integration branch: %s" % integration_branch)
         else:
             # Create integration branch (delete former one if already exists)
             integration_branch = common.integration_branch_name(
-                project.name, merge_request.source_branch
+                project.name, merge_request.source_branch, manifest_branch
             )
             for ref in manifest_repo.references:
                 if integration_branch == ref.name:
                     manifest_repo.delete_head(ref)
 
-            logging.debug("Integration branch: %s", integration_branch)
+            print("Creating integration branch: %s" % integration_branch)
             manifest_repo.head.set_reference(
                 manifest_repo.create_head(integration_branch)
             )
@@ -178,7 +180,6 @@ def integrate_into_manifest(
         manifest_revision = common.commit_and_push(
             manifest_project,
             manifest_repo,
-            integration_branch,
             message,
             gitlab.user.username,
             gitlab.user.email,
@@ -209,9 +210,9 @@ def main():
         required=True,
     )
     parser.add_argument(
-        "--integration-base",
+        "--manifest-branch",
         help="""manifest branch to branch off from""",
-        dest="integration_base",
+        dest="manifest_branch",
         required=True,
     )
     parser.add_argument(
@@ -279,7 +280,7 @@ def main():
 
     manifest_revision = integrate_into_manifest(
         manifest_project=manifest_project,
-        integration_base=args.integration_base,
+        manifest_branch=args.manifest_branch,
         manifest_file=args.manifest_file,
         srcrev_file=args.srcrev_file,
         recipe_name=args.recipe_name,
diff --git a/scripts/lava_api.py b/scripts/lava_api.py
new file mode 100755
index 0000000..44bbdf1
--- /dev/null
+++ b/scripts/lava_api.py
@@ -0,0 +1,430 @@
+#!/usr/bin/env python3
+#
+# Classes to access lava via the API
+#
+
+import requests
+from ruamel.yaml import YAML
+import re
+import logging
+import json
+
+from lava_credentials import get_lava_host, get_lava_credentials
+
+
+class LavaException(Exception):
+    pass
+
+
+class Lava:
+    """Access functions for the lava rest api, the class
+    holds the token and and the host.
+    All other requests should be stateless.
+    """
+
+    def __init__(self, host, token):
+        self._host = host
+        self._token = token
+
+    def plain_request(self, url):
+        response = requests.get(
+            url=url,
+            headers={"Authorization": "Token %s" % self._token},
+        )
+
+        # Handle errors
+        if not response.ok:
+            logging.error("Response from lava %d", response.status_code)
+            return response.ok, None
+        logging.debug("Response from lava %d", response.status_code)
+
+        return response.ok, response.text
+
+    def request(self, suburl):
+        # Get test results via LAVA REST API
+        ok, text = self.plain_request(
+            "http://{host}/api/v0.2/{suburl}".format(host=self._host, suburl=suburl)
+        )
+        return ok, text
+
+    def request_json(self, suburl):
+        ok, text = self.request(suburl)
+        if not ok:
+            raise LavaException("Failed to query lava suburl: %s" % suburl)
+
+        results = json.loads(text)
+        return results
+
+    def get_job_suites(self, jobid):
+        return self.request_json("jobs/{}/suites/".format(jobid))
+
+    def get_job_details(self, jobid):
+        return self.request_json("jobs/{}/".format(jobid))
+
+    def get_test_results(self, jobid):
+        return self.request_json("jobs/{}/tests/".format(jobid))
+
+    def get_test_log(self, jobid):
+        ok, content = self.request("jobs/{}/logs/".format(jobid))
+        if not ok:
+            raise LavaException("Failed to query lava for test log of job %d" % jobid)
+
+        # print(content)
+        yaml = YAML(typ="safe")
+        log = yaml.load(content)
+
+        return log
+
+    def get_device_list(self):
+        return self.request_json("devices/")
+
+    def get_device(self, name):
+        return self.request_json("devices/{}/".format(name))
+
+    def get_devicetypes(self, name):
+        return self.request_json("devicetypes/{}".format(name))
+
+    def get_tag_list(self):
+        return self.request_json("/tags/")
+
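+# Hedged usage sketch (host and job id are placeholders): the token is sent as
+# an "Authorization: Token ..." header against /api/v0.2/ of the LAVA host:
+#   lava = Lava("lava.example.com", "<token>")
+#   suites = lava.get_job_suites(25142)["results"]
+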
+
+class LavaTest:
+    """Abstraction for one lava test"""
+
+    def __init__(self, parent, test, joblog):
+
+        self._test = test
+        self.parent = parent
+        self.heading = None
+        self.test_case_id = test["name"]
+        self.result = self._test["result"] == "pass"
+        self.measurement = self._test["measurement"]
+        self.timestamp = self._test["logged"]
+
+        log_end = test["end_log_line"]
+        if log_end is None:
+            logging.debug(test)
+            raise LavaException("No logline defined, skipping this test")
+
+        (
+            log_start,
+            log_end,
+            description_start,
+            description_end,
+        ) = self._search_log_for_test(joblog, log_end)
+
+        self.log = joblog[log_start:log_end]
+        self.description = ""
+
+        if description_start is not None and description_end is not None:
+            for line in joblog[description_start + 1 : description_end]:
+                if line["lvl"] != "target":
+                    continue
+                if self.heading is None:
+                    if line["msg"].startswith("*"):
+                        self.heading = ""
+                        self.description += line["msg"] + "\n"
+                    else:
+                        self.heading = line["msg"]
+                else:
+                    if len(line["msg"]) > 0:
+                        self.description += line["msg"] + "\n"
+
+    @property
+    def name(self):
+        name = self.heading
+        if name is None or len(name) == 0:
+            name = self.test_case_id
+        return name
+
+    @property
+    def machine(self):
+        return self.parent.machine
+
+    @property
+    def device(self):
+        return self.parent.device
+
+    def __lt__(self, other):
+        return self.timestamp < other.timestamp
+
+    def __str__(self):
+        return self.name + ": " + str(self.result)
+
+    def _search_log_for_test(self, log, log_end):
+        """Loops reversely over the log and tries to find the relevant lines
+        Returns the found line numbers.
+        """
+
+        # Search end of log output
+        for line_no in range(log_end, len(log)):
+            # logging.debug(line_no)
+
+            line = log[line_no]
+            if line["lvl"] == "results" and line["msg"]["case"] == self.test_case_id:
+                log_end = line_no
+                break
+
+        if line_no >= len(log) - 1:
+            raise LavaException("Failed to find test in log, skipping this test")
+
+        log_start = None
+        description_start = None
+        description_end = None
+
+        for line_no in range(log_end - 1, 0, -1):
+            # logging.debug(line_no)
+            line = log[line_no]
+
+            # There seem to be two cases
+            # Here the LAVA_SIGNAL_STARTRUN is used as start marker
+            if line["lvl"] == "target" and line["msg"].startswith(
+                "<LAVA_SIGNAL_STARTRUN"
+            ):
+                log_start = line_no
+                break
+
+            # Here the prior 'results' output is used as start marker
+            # This is relevant when there are multiple tests in one
+            # file
+            if line["lvl"] == "results":
+                log_start = line_no + 1
+
+            # Search for the lava test description
+            if line["lvl"] == "target" and line["msg"].startswith(
+                "<TESTCASE_DESCRIPTION_END {}>".format(self.test_case_id)
+            ):
+                description_end = line_no
+
+            if line["lvl"] == "target" and line["msg"].startswith(
+                "<TESTCASE_DESCRIPTION_START {}>".format(self.test_case_id)
+            ):
+                description_start = line_no
+
+            if (
+                description_start is not None
+                and description_end is not None
+                and log_start is not None
+            ):
+                break
+            if log_start is not None and line_no < log_start - 100:
+                # Don't continue searching forever
+                break
+
+        # It seems the output of subsequent tests overlaps sometimes
+        # The description may already start but the result of the prior
+        # test comes few lines later
+        if description_start is not None and log_start > description_start:
+            log_start = description_start
+
+        if log_start is None:
+            raise LavaException("Failed to find test in log, skipping this test")
+
+        return log_start, log_end, description_start, description_end
+
+
+class LavaDevice:
+    """Abstraction of a lava device"""
+
+    def __init__(self, data, tags=None):
+        self.name = data["hostname"]
+        tag_ids = data["tags"]
+        self.data = data
+        self.tags = []
+        self._serial = None
+        self._article_no = None
+        self._hw_revision = None
+
+        if tags is not None:
+            for t in tag_ids:
+                if t in tags.keys():
+                    tag = tags[t]
+                    self.tags.append(tag)
+                    if tag.is_serial:
+                        self._serial = tag
+                    elif tag.is_article_no:
+                        self._article_no = tag
+                    elif tag.is_hw_revision:
+                        self._hw_revision = tag
+
+    @property
+    def machine(self):
+        return re.sub(r"\d", "", self.name)
+
+    @property
+    def serial(self):
+        if self._serial is None:
+            return ""
+        return self._serial.name
+
+    @property
+    def hw_revision(self):
+        if self._hw_revision is None:
+            return ""
+        return self._hw_revision.name
+
+    @property
+    def article_no(self):
+        if self._article_no is None:
+            return ""
+        return self._article_no.name
+
+    def __lt__(self, other):
+        return self.name < other.name
+
+    @staticmethod
+    def devicelist_from_lava(lava, tags=None, filter_types=None):
+        if filter_types is None:
+            filter_types = ["karl", "lxc"]
+        devices = lava.get_device_list()
+        device_list = {}
+        for d in devices["results"]:
+            if d["device_type"] in filter_types:
+                continue
+            device_list[d["hostname"]] = LavaDevice(d, tags)
+        return device_list
+
+    def __str__(self):
+        return "LavaDevice " + self.name
+
+
+class LavaTag:
+    """Abstraction of a lava device"""
+
+    def __init__(self, data):
+        self.name = data["name"]
+        self.description = data["description"]
+        self.id = data["id"]
+
+    def __str__(self):
+        return "LavaTag " + self.name
+
+    @property
+    def is_serial(self):
+        return re.match(r"^\d{7,10}$", self.name) is not None
+
+    @property
+    def is_article_no(self):
+        return re.match(r"^900-\d{4}R\w?$", self.name) is not None
+
+    @property
+    def is_hw_revision(self):
+        return re.match(r"^v\d{1,2}\.\d{1,3}(\.\d{1,3})?$", self.name) is not None
+
+    @staticmethod
+    def taglist_from_lava(lava):
+        tags = lava.get_tag_list()
+        tag_list = {}
+        for t in tags["results"]:
+            tag_list[t["id"]] = LavaTag(t)
+        return tag_list
+
+
+class LavaSuite:
+    """Abstraction of lava suite"""
+
+    def __init__(self, parent, suite, joblog):
+        self._suite = suite
+        self.lava = parent.lava
+        self.parent = parent
+        self.tests = []
+        self.name = self._suite["name"]
+        s = self.name.split("_", 2)
+        if len(s) > 1:
+            self.name = s[1]
+
+        for test in self._get_tests():
+            try:
+                lava_test = LavaTest(self, test, joblog)
+                self.tests.append(lava_test)
+            except LavaException as e:
+                logging.debug("%s: %s", str(e), test["name"])
+
+        self.tests.sort()
+
+    @property
+    def machine(self):
+        return self.parent.machine
+
+    @property
+    def device(self):
+        return self.parent.device
+
+    def _get_tests(self):
+        ok, text = self.lava.plain_request(self._suite["resource_uri"] + "tests/")
+        if not ok:
+            raise LavaException("Failed to query lava for suite's tests")
+
+        return json.loads(text)["results"]
+
+    def __lt__(self, other):
+        return self.tests[0] < other.tests[0]
+
+    def __str__(self):
+        return "TestSuite " + self.name
+
+
+class LavaJob:
+    """Abstraction for a lava job containing multiple tests
+    on one device.
+    """
+
+    def __init__(self, lava, jobid, device_list=None):
+        """Queries all needed information using the given
+        lava class and stores it.
+        """
+        self.jobid = jobid
+        self.lava = lava
+
+        self._jobdetails = self.lava.get_job_details(jobid)
+
+        self._suites = self.lava.get_job_suites(jobid)["results"]
+        self._test_log = self.lava.get_test_log(jobid)
+
+        self.name = self._jobdetails["description"]
+        self.device = self._jobdetails["actual_device"]
+        # Remove the number from device name to get the machine
+        self.machine = re.sub(r"\d", "", self.device)
+        self.device_type = self._jobdetails["requested_device_type"]
+
+        if device_list is not None:
+            if self.device in device_list:
+                self.device = device_list[self.device]
+
+        self.ok = (
+            self._jobdetails["state"] == "Finished"
+            and self._jobdetails["health"] == "Complete"
+        )
+        self.suites = []
+
+        if not self.ok:
+            logging.warning(
+                "The job %d is not finished or it infrastructure problems", jobid
+            )
+            return
+
+        for suite in self._suites:
+            # Skip the internal lava steps
+            if suite["name"] == "lava":
+                continue
+            try:
+                lava_suite = LavaSuite(self, suite, self._test_log)
+                self.suites.append(lava_suite)
+            except LavaException as e:
+                logging.debug("%s: %s", str(e), suite["name"])
+
+        self.suites.sort()
+
+    def __str__(self):
+        return self.name
+
+
+def main():
+    host = get_lava_host()
+    _, token = get_lava_credentials(host)
+    lava = Lava(host, token)
+    LavaJob(lava, "25142")
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/lava_create_testreport.py b/scripts/lava_create_testreport.py
new file mode 100755
index 0000000..a1f0e80
--- /dev/null
+++ b/scripts/lava_create_testreport.py
@@ -0,0 +1,326 @@
+#!/usr/bin/env python3
+# Queries the test results from lava and generates a report
+#
+
+import argparse
+import logging
+
+from markdown_generator import MarkDownDoc, MarkDownTable
+from lava_api import Lava, LavaTag, LavaDevice, LavaJob
+from lava_credentials import get_lava_host, get_lava_credentials
+
+
+class TestCollection:
+    """Collection of tests with the same id, but different suites ..."""
+
+    def __init__(self, test_case_id, tests):
+        self.test_case_id = test_case_id
+
+        self.tests = {}  # Tests grouped by machine
+        for t in tests:
+            if t.machine not in self.tests.keys():
+                self.tests[t.machine] = []
+            self.tests[t.machine].append(t)
+
+    @property
+    def has_measurements(self):
+        for key in self.tests:
+            for t in self.tests[key]:
+                m = t.measurement
+                if m is not None and len(m) > 0:
+                    return True
+        return False
+
+    def get_measurements(self, machine):
+        if machine not in self.tests.keys():
+            return ""
+        tests = self.tests[machine]
+        out = []
+        for t in tests:
+            out.append("{:.2f}".format(float(t.measurement)))
+        if len(out) == 0:
+            return None
+        return "/".join(out)
+
+    @property
+    def name(self):
+        t = self.get_test()
+        if t is None:
+            return self.test_case_id
+        return t.name
+
+    # Sort alphabetically
+    def __lt__(self, other):
+        return str(self.get_test()) < str(other.get_test())
+
+    def get_test(self, index=0):
+        keys = list(self.tests.keys())
+        if len(keys) < 1:
+            return None
+        if len(self.tests[keys[index]]) < 1:
+            return None
+        return self.tests[keys[index]][0]
+
+    def get_result(self, machine):
+        if machine not in self.tests.keys():
+            return ""
+        tests = self.tests[machine]
+        count = 0
+        for t in tests:
+            if t.result:
+                count += 1
+        if count == len(tests):
+            return "Passed"
+        if count == 0:
+            return "Failed"
+        return "Partly failed ({} of {})".format(len(tests) - count, len(tests))
+
+    @staticmethod
+    def collections_from_suites(suite_list):
+
+        all_tests = []
+        for suite in suite_list:
+            all_tests += suite.tests
+
+        tests = {}
+        test_collections = []
+        for test in all_tests:
+            if test.test_case_id not in tests.keys():
+                tests[test.test_case_id] = []
+            tests[test.test_case_id].append(test)
+        for k, v in tests.items():
+            test_collections.append(TestCollection(k, v))
+        return test_collections
+
+    def __str__(self):
+        return "TestCollection " + self.name
+
+
+class SuiteCollection:
+    """Collection of lava suites like a row in a table, all suites have the same name"""
+
+    def __init__(self, name, suites):
+        self.name = name
+
+        self.suites = {}  # Test suites grouped by machine
+        for s in suites:
+            if s.machine not in self.suites.keys():
+                self.suites[s.machine] = []
+            self.suites[s.machine].append(s)
+        self.test_collections = TestCollection.collections_from_suites(suites)
+        self.test_collections.sort()
+
+    @staticmethod
+    def markdown_result_table(suite_collections):
+        # Write a header for a table
+        all_machines = SuiteCollection.all_machines(suite_collections)
+
+        table = MarkDownTable()
+        table.set_alignment("llc")
+        header = table.get_header()
+
+        header.add_content(["Testsuite", "Test"], bold=True)
+        for m in all_machines:
+            header.add_content(m.capitalize(), bold=True)
+
+        # Result rows
+        for c in suite_collections:
+            suite_col = c.name
+            for t in c.test_collections:
+                row = table.get_row()
+                row.add_content([suite_col, "[{}](#{})".format(t.name, t.test_case_id)])
+                for m in all_machines:
+                    row.add_content(t.get_result(m))
+                suite_col = ""
+
+        return table
+
+    @property
+    def machines(self):
+        return self.suites.keys()
+
+    @property
+    def devices(self):
+        devices = []
+        for l in self.suites.values():
+            for s in l:
+                d = s.device
+                if d not in devices:
+                    devices.append(d)
+        return devices
+
+    # Sort alphabetically
+    def __lt__(self, other):
+        return self.name < other.name
+
+    def __str__(self):
+        return "SuiteCollection " + self.name
+
+    @staticmethod
+    def collections_from_jobs(job_list):
+
+        all_suites = []
+        for job in job_list:
+            if not job.ok:
+                continue
+            all_suites += job.suites
+        suites = {}
+        suite_collections = []
+        for suite in all_suites:
+            if suite.name not in suites.keys():
+                suites[suite.name] = []
+            suites[suite.name].append(suite)
+        for k, v in suites.items():
+            suite_collections.append(SuiteCollection(k, v))
+
+        suite_collections.sort()
+        return suite_collections
+
+    @staticmethod
+    def all_machines(collections):
+        """Return a list of all machines (santino/santoka/...) used in the given collections"""
+        machines = []
+        for s in collections:
+            for m in s.machines:
+                if m not in machines:
+                    machines.append(m)
+        machines.sort()
+        return machines
+
+    @staticmethod
+    def all_devices(collections):
+        """Return a list of all devices (santino01/santino02/santoka03/...) used in the given collections"""
+        devices = []
+        for s in collections:
+            for m in s.devices:
+                if m not in devices:
+                    devices.append(m)
+        devices.sort()
+        return devices
+
+
+def lava_create_testreport(jobids, lava=None):
+    # Get credentials
+    if lava is None:
+        host = get_lava_host()
+        _, token = get_lava_credentials(host)
+        lava = Lava(host, token)
+
+    lava_tags = LavaTag.taglist_from_lava(lava)
+    lava_devices = LavaDevice.devicelist_from_lava(lava, lava_tags)
+
+    lava_jobs = []
+    for jobid in jobids:
+        lava_jobs.append(LavaJob(lava, jobid, lava_devices))
+
+    suite_collections = SuiteCollection.collections_from_jobs(lava_jobs)
+
+    all_machines = SuiteCollection.all_machines(suite_collections)
+    all_devices = SuiteCollection.all_devices(suite_collections)
+
+    # Write out the markdown
+    doc = MarkDownDoc()
+    doc.add_header("Test report")
+    doc.add_text_block(
+        "These tests are automatically executed on the given devices using lava as test engine."
+    )
+
+    doc.add_header("Overview table with test results", level=1)
+    table = SuiteCollection.markdown_result_table(suite_collections)
+    doc.add_element(table)
+
+    for j in lava_jobs:
+        if not j.ok:
+            doc.add_text_block(
+                "NOTE: Lava job {} for device {} did not complete, results are not part of the table".format(
+                    j.jobid, j.machine.capitalize()
+                )
+            )
+
+    doc.add_header("Test details")
+    for c in suite_collections:
+        doc.add_header("Test suite {}\n".format(c.name), level=2)
+        for t in c.test_collections:
+            doc.add_anchor(t.test_case_id)
+            collapse = doc.add_collapse_block(t.name)
+            collapse.add_text_block(t.get_test().description)
+            if t.has_measurements:
+                collapse.add_text_block("Measurements", italic=True)
+                table = collapse.add_table()
+                header = table.get_header()
+                row = table.get_row()
+
+                for m in all_machines:
+                    header.add_content(m.capitalize(), bold=True)
+                    row.add_content(t.get_measurements(m))
+            collapse.add_text_block("_Devices_")
+            collapse.add_text_block("Test was executed on following devices:")
+            passed = []
+            failed = []
+            for d in all_devices:
+                if d.machine in t.tests.keys():
+                    for test in t.tests[d.machine]:
+                        if test.result:
+                            passed += [d.name.capitalize()]
+                        else:
+                            failed += [d.name.capitalize()]
+
+            if len(passed):
+                collapse.add_text_block("**Passed:**\n" + ", ".join(passed))
+            if len(failed):
+                collapse.add_text_block("**Failed:**\n" + ", ".join(failed))
+
+    doc.add_header("Test devices")
+    doc.add_text_block("These devices where used during the test run.")
+
+    cols = 0
+    for d in all_devices:
+        if cols == 0:
+            table = doc.add_table()
+            header = table.get_row()
+            row_article_no = table.get_row()
+            row_serial = table.get_row()
+            row_hw_revision = table.get_row()
+            header.add_content("")
+            row_serial.add_content("Serial Number", bold=True)
+            row_hw_revision.add_content("Hardware Revision", bold=True)
+            row_article_no.add_content("Article Number", bold=True)
+
+        header.add_content(d.name.capitalize(), bold=True)
+        row_serial.add_content(d.serial)
+        row_hw_revision.add_content(d.hw_revision)
+        row_article_no.add_content(d.article_no)
+        cols += 1
+        if cols > 5:
+            cols = 0
+
+    return doc.to_string()
+
+
+def main():
+    """
+    Create a markdown report for the given lava jobids
+    """
+
+    # Read job file from command line
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "jobids", type=int, nargs="+", help="List of testjob IDs to query"
+    )
+    parser.add_argument(
+        "--verbose",
+        help="""More verbose output.""",
+        action="store_true",
+        default=False,
+    )
+    args = parser.parse_args()
+    if args.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+    else:
+        logging.basicConfig(level=logging.ERROR)
+
+    print(lava_create_testreport(args.jobids))
+
+
+if __name__ == "__main__":
+    main()
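+
+# Hedged CLI sketch (job ids are placeholders): credentials are resolved by
+# lava_credentials.py (LAVA_HOST/LAVA_USER/LAVA_TOKEN or ~/.lava_credentials):
+#   ./lava_create_testreport.py 25142 25143 > testreport.md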
diff --git a/scripts/lava_credentials.py b/scripts/lava_credentials.py
new file mode 100755
index 0000000..282ac07
--- /dev/null
+++ b/scripts/lava_credentials.py
@@ -0,0 +1,93 @@
+#!/usr/bin/env python3
+""" Function to read lava creadentials from environment, configfile or query the user on stdin """
+
+import os
+import configparser
+import getpass
+import sys
+import stat
+
+DEFAULT_LAVA_HOST = "lava"
+DEFAULT_LAVA_CREDENTIAL_FILE = "~/.lava_credentials"
+
+
+def get_lava_credentials(host=None):
+    """
+    Look for credentials in the following order:
+        1. environment variables
+        2. configuration file
+        3. prompt the user
+    If new data has been entered, save it to configuration file
+    """
+
+    if host is None:
+        host = get_lava_host()
+
+    configfile = os.path.expanduser(
+        os.getenv("LAVA_CREDENTIAL_FILE", DEFAULT_LAVA_CREDENTIAL_FILE)
+    )
+
+    config = configparser.ConfigParser()
+    config.read(configfile)
+    new_data = False
+    if "LAVA_USER" in os.environ:
+        lava_user = os.environ["LAVA_USER"]
+    else:
+        try:
+            lava_user = config.get(host, "LAVA_USER")
+        except configparser.Error:
+            print(
+                "Missing user name for '%s'. Please either set it via the LAVA_USER"
+                "environment variable or enter it now." % host
+            )
+            lava_user = input("LAVA_USER: ")
+            new_data = True
+    if "LAVA_TOKEN" in os.environ:
+        lava_token = os.environ["LAVA_TOKEN"]
+    else:
+        try:
+            lava_token = config.get(host, "LAVA_TOKEN")
+        except configparser.Error:
+            print(
+                "Missing authentication token for '%s@%s'. Please either set it via the"
+                "LAVA_TOKEN environment variable or enter it now." % (lava_user, host)
+            )
+            lava_token = getpass.getpass("LAVA_TOKEN: ")
+            new_data = True
+
+    # Ask the user whether to save credentials in private config file
+    if new_data:
+        valid = {"yes": True, "ye": True, "y": True, "": True, "no": False, "n": False}
+        choice = None
+        while choice not in valid:
+            sys.stdout.write(
+                "Do you want to save these credentials in '%s'? " "[Y/n] " % configfile
+            )
+            choice = input().lower()
+        if valid[choice]:
+            if not config.has_section(host):
+                config.add_section(host)
+            config.set(host, "LAVA_USER", lava_user)
+            config.set(host, "LAVA_TOKEN", lava_token)
+            with open(configfile, "w", encoding="utf-8") as cfgfile:
+                config.write(cfgfile)
+            # Restrict permissions after writing; chmod on a file that does not
+            # exist yet would fail
+            os.chmod(configfile, stat.S_IRUSR | stat.S_IWUSR)
+
+    return lava_user, lava_token
+
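+# For illustration (values are hypothetical): the credential file is plain
+# configparser INI with one section per LAVA host:
+#   [lava.example.com]
+#   lava_user = jdoe
+#   lava_token = <token>
+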
+
+def get_lava_host():
+    """Get LAVA host from environment or use default value"""
+    host = os.getenv("LAVA_HOST", DEFAULT_LAVA_HOST)
+    return host
+
+
+def main():
+    """Just test the credential functions"""
+    host = get_lava_host()
+    # Get credentials
+    get_lava_credentials(host)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/markdown_generator.py b/scripts/markdown_generator.py
new file mode 100755
index 0000000..a952a50
--- /dev/null
+++ b/scripts/markdown_generator.py
@@ -0,0 +1,316 @@
+#!/usr/bin/env python3
+#
+# Classes to generate a markdown file in a simple way
+#
+
+
+class MarkDownText:
+    def __init__(self, text, bold=False, italic=False):
+        self.bold = bold
+        self.italic = italic
+        self.text = text
+
+    @property
+    def length(self):
+        l = len(self.text)
+        if self.italic:
+            l += 2
+        if self.bold:
+            l += 4
+        return l
+
+    def render(self):
+        out = self.text.splitlines()
+        if len(out) == 0:
+            return [""]
+        for i in range(0, len(out)):
+            if self.bold:
+                out[i] = "**{}**".format(out[i])
+            if self.italic:
+                out[i] = "_{}_".format(out[i])
+        return out
+
+    def __str__(self):
+        return __class__ + " " + self.text
+
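+# Hedged example of the rendering contract: MarkDownText("done", bold=True)
+# renders to ["**done**"]; multi-line text is styled line by line.
+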
+
+class MarkDownTableCell(MarkDownText):
+    def __init__(self, row, col, content, bold=False, italic=False):
+        self.row = row
+        self.col = col
+        super().__init__(content, bold, italic)
+
+    def __str__(self):
+        return "MarkDownTableCell (" + self.row.row + ":" + self.col + ") " + self.text
+
+    @property
+    def alignment(self):
+        return self.row.table.get_alignment(self.col)
+
+    @property
+    def width(self):
+        return self.row.table.get_column_width(self.col)
+
+    def render(self):
+        c = super().render()[0]
+        alignment = self.alignment
+        width = self.width
+
+        width -= 2
+        if alignment == "c":
+            c = c.center(width)
+        elif alignment == "r":
+            c = c.rjust(width)
+        else:  # default 'l'
+            c = c.ljust(width)
+        c = " " + c + " "
+        return [c]
+
+
+class MarkDownTableRow:
+    def __init__(self, table, row):
+        self.cols = []
+        self.table = table
+        self.row = row
+        self._current_col = 0  # which to fill next
+
+    @property
+    def length(self):
+        return len(self.cols)
+
+    def _set_content(self, content, col):
+        if self.length <= col:
+            missing = col + 1 - len(self.cols)
+            self.cols += [None] * missing
+        self.cols[col] = content
+
+    def add_content(self, content, col=-1, bold=False, italic=False):
+        if not isinstance(content, list):
+            content = [content]
+        if col >= 0:
+            self._current_col = col
+        for c in content:
+            self._set_content(
+                MarkDownTableCell(self, self._current_col, c, bold, italic),
+                self._current_col,
+            )
+            self._current_col += 1
+
+    def __str__(self):
+        c = ""
+        for i in self.cols:
+            c += "| " + i.text + " |"
+        return "MarkDownTableRow ({} cols) {}".format(self.length, c)
+
+    def render(self):
+        output = "|"
+        for i, c in enumerate(self.cols):
+            if c is None:
+                # Cells padded with None render as empty cells of the column width
+                output += " " * self.table.get_column_width(i) + "|"
+            else:
+                output += c.render()[0] + "|"
+        return [output]
+
+
+class MarkDownTable:
+    def __init__(self):
+        self.rows = []
+        self.alignment = "l"
+        self._current_row = 0
+        self.column_width = []
+
+    def set_alignment(self, alignment):
+        """Set the alignment as a string like lccr with one char for each column"""
+        self.alignment = alignment
+
+    @property
+    def length(self):
+        return len(self.rows)
+
+    def get_row(self, row=-1):
+        if row == -1:
+            row = self.length
+
+        if self.length <= row:
+            for i in range(self.length, row + 1):
+                self.rows.append(MarkDownTableRow(self, i))
+        return self.rows[row]
+
+    def get_header(self):
+        return self.get_row(0)
+
+    def __str__(self):
+        return "MarkDownTable ({} rows)".format(self.length)
+
+    def get_alignment(self, col):
+        if col < len(self.alignment):
+            return self.alignment[col]
+        return self.alignment[-1]
+
+    def get_column_width(self, col):
+        if col < len(self.column_width):
+            return self.column_width[col] + 2
+        return 60
+
+    def get_column_count(self):
+        max_cols = 0
+        for row in self.rows:
+            if max_cols < row.length:
+                max_cols = row.length
+        return max_cols
+
+    def _render_2nd_line(self):
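+        # Render the alignment separator row, e.g. "|:---|:--:|---:|" for "lcr"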
+        cnt = self.get_column_count()
+        out = "|"
+        for i in range(0, cnt):
+            alignment = self.get_alignment(i)
+            width = self.get_column_width(i)
+            if alignment == "c":
+                out += ":" + "-" * (width - 2) + ":"
+            elif alignment == "r":
+                out += "-" * (width - 1) + ":"
+            else:  # default 'l'
+                out += ":" + "-" * (width - 1)
+            out += "|"
+        return [out]
+
+    def _evaluate_column_width(self):
+        cnt = self.get_column_count()
+        self.column_width = [0] * cnt
+        for row in self.rows:
+            for i in range(0, cnt):
+                # Skip columns that are missing or were padded with None
+                if i >= len(row.cols) or row.cols[i] is None:
+                    continue
+                cell_length = row.cols[i].length
+                if cell_length > self.column_width[i]:
+                    self.column_width[i] = cell_length
+
+    def render(self):
+        self._evaluate_column_width()
+        text = [""]
+        # Header line
+        text += self.get_header().render()
+        text += self._render_2nd_line()
+        for row in self.rows[1:]:
+            text += row.render()
+        return text
+
+
+class MarkDownHeader(MarkDownText):
+    def __init__(self, text, level=0, bold=False, italic=False):
+        self.level = level
+        super().__init__(text, bold, italic)
+
+    def render(self):
+        out = "#" * (self.level + 1) + " " + super().render()[0]
+        return ["", out, ""]
+
+
+class MarkDownTextBlock(MarkDownText):
+    def __init__(self, text, bold=False, italic=False, before=1, after=1):
+        self.before = before
+        self.after = after
+        super().__init__(text, bold, italic)
+
+    def render(self):
+        out = []
+        out += [""] * self.before
+        out += super().render()
+        out += [""] * self.after
+        return out
+
+
+class MarkDownCodeBlock(MarkDownText):
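+    """Code block rendered as an HTML <pre> section instead of a fenced block."""
+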
+    def __init__(self, code):
+        super().__init__(code)
+
+    def render(self):
+        out = []
+        out += ["<pre>"]
+        out += super().render()
+        out += ["</pre>"]
+        return out
+
+
+class MarkDownAnchor:
+    def __init__(self, name):
+        self.name = name
+
+    def render(self):
+        out = '<a name="{}"></a>'.format(self.name)
+        return [out]
+
+
+class MarkDownDoc:
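+    """Container for markdown elements; render() concatenates their lines in order."""
+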
+    def __init__(self):
+        self.elements = []
+
+    def add_element(self, element):
+        self.elements.append(element)
+
+    def add_table(self):
+        table = MarkDownTable()
+        self.add_element(table)
+        return table
+
+    def add_header(self, text=None, level=0):
+        header = MarkDownHeader(text, level)
+        self.add_element(header)
+        return header
+
+    def add_text_block(self, text=None, bold=False, italic=False, before=1, after=1):
+        block = MarkDownTextBlock(text, bold, italic, before, after)
+        self.add_element(block)
+        return block
+
+    def add_text(self, text=None, bold=False, italic=False):
+        block = MarkDownText(text, bold, italic)
+        self.add_element(block)
+        return block
+
+    def add_code_clock(self, code):
+        block = MarkDownCodeBlock(code)
+        self.add_element(block)
+        return block
+
+    def add_anchor(self, name):
+        anchor = MarkDownAnchor(name)
+        self.add_element(anchor)
+        return anchor
+
+    def add_collapse_block(self, title=None):
+        collapse = MarkDownCollapseBlock(title)
+        self.add_element(collapse)
+        return collapse
+
+    def render(self):
+        out = []
+        for e in self.elements:
+            out += e.render()
+        return out
+
+    def to_string(self):
+        return "\n".join(self.render())
+
+
+class MarkDownCollapseBlock(MarkDownDoc):
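+    """Sub-document rendered inside a collapsible HTML <details> section."""
+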
+    def __init__(self, title=None):
+        self.title = title
+        super().__init__()
+
+    def render(self):
+        out = []
+        out += ["<details>"]
+        if self.title is not None:
+            out += ["<summary>{}</summary>".format(self.title)]
+        out += super().render()
+        out += ["</details>"]
+        return out
+
+
+def main():
+    doc = MarkDownDoc()
+    doc.add_header("Hello world")
+    doc.add_text_block("This is the text")
+    print("\n".join(doc.render()))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/merge_into_manifest.py b/scripts/merge_into_manifest.py
index f29cd76..d60daad 100755
--- a/scripts/merge_into_manifest.py
+++ b/scripts/merge_into_manifest.py
@@ -14,9 +14,9 @@ from integrate_into_manifest import integrate_into_manifest
 
 def merge_into_manifest(
     manifest_project,
-    master_branch,
+    manifest_branch,
     project,
-    master_branch_project,
+    project_branch,
     srcrev_file,
     recipe_name,
     commit,
@@ -30,7 +30,7 @@ def merge_into_manifest(
     # Get source merge request
     mrs = get_merge_requests(
         project,
-        target_branch=master_branch_project,
+        target_branch=project_branch,
         state="merged",
         commit=commit,
     )
@@ -52,8 +52,11 @@ def merge_into_manifest(
         )
         return ""
 
-    integration_branch = common.integration_branch_name(project.name, original_branch)
-    target_branch = master_branch
+    target_branch = manifest_branch
+
+    integration_branch = common.integration_branch_name(
+        project.name, original_branch, target_branch
+    )
 
     # Create merge request. If there already is a merge request, we assume that it has
     # been opened manually and thus will be merged manually as well, so we fail here.
@@ -92,7 +95,7 @@ def merge_into_manifest(
             # we want a completely automated process in every case.
             manifest_revision = integrate_into_manifest(
                 manifest_project=manifest_project,
-                integration_base=target_branch,
+                manifest_branch=target_branch,
                 manifest_file=common.manifest_file,
                 srcrev_file=srcrev_file,
                 recipe_name=recipe_name,
@@ -136,9 +139,9 @@ def main():
         required=True,
     )
     parser.add_argument(
-        "--master-branch",
-        help="""master branch to merge changes into""",
-        dest="master_branch",
+        "--manifest-branch",
+        help="""manifest branch to merge changes into""",
+        dest="manifest_branch",
         required=True,
     )
     parser.add_argument(
@@ -148,9 +151,9 @@ def main():
         required=True,
     )
     parser.add_argument(
-        "--master-branch-project",
-        help="""master branch to merge changes into (project)""",
-        dest="master_branch_project",
+        "--project-branch",
+        help="""project branch to merge changes into""",
+        dest="project_branch",
         default=None,
         required=False,
     )
@@ -195,16 +198,15 @@ def main():
     project = common.get_project(gitlab, args.project)
     manifest_project = common.get_project(gitlab, args.manifest_project)
 
-    # If no master_branch_project is set assume manifest and project
-    # master branches are named the same
-    if not args.master_branch_project:
-        args.master_branch_project = args.master_branch
+    # If no project_branch is set, assume manifest and project branches are the same
+    if not args.project_branch:
+        args.project_branch = args.manifest_branch
 
     manifest_revision = merge_into_manifest(
         manifest_project=manifest_project,
-        master_branch=args.master_branch,
+        manifest_branch=args.manifest_branch,
         project=project,
-        master_branch_project=args.master_branch_project,
+        project_branch=args.project_branch,
         srcrev_file=args.srcrev_file,
         recipe_name=args.recipe_name,
         commit=args.commit,
diff --git a/scripts/package_release.py b/scripts/package_release.py
index 9e7a414..2726273 100755
--- a/scripts/package_release.py
+++ b/scripts/package_release.py
@@ -6,74 +6,58 @@ import os
 import sys
 import shutil
 import hashlib
-import tempfile
+
 from convert_md2html import convertmd2html
 from generate_release_metadata import generate_metadata
 
 
-def md5(fname):
+def md5(input_file: str):
+    """
+    Calculate and return the MD5 sum of the given input_file.
+    """
     hash_md5 = hashlib.md5()
-    with open(fname, "rb", encoding=None) as f:
+    with open(input_file, "rb", encoding=None) as f:
         for chunk in iter(lambda: f.read(4096), b""):
             hash_md5.update(chunk)
     return hash_md5.hexdigest()
 
 
-def write_md5sums_file(md5sums, subdir, output_dir, outlocal_dir):
-    with tempfile.TemporaryDirectory() as tmp:
-        source_file = os.path.join(tmp, subdir, "md5sums.txt")
-        os.makedirs(os.path.dirname(source_file), exist_ok=True)
-
-        with open(source_file, "w", encoding="utf-8") as f_md5:
-            for f, h in md5sums.items():
-                f_md5.write("{}  {}\n".format(h, f))
-
-        if output_dir is not None:
-            target_file = os.path.join(
-                output_dir, subdir, os.path.basename(source_file)
-            )
-            print("Copy: %s -> %s" % (source_file, target_file))
-            shutil.copyfile(source_file, target_file, follow_symlinks=True)
-
-        if outlocal_dir is not None:
-            target_file = os.path.join(
-                outlocal_dir, subdir, os.path.basename(source_file)
-            )
-            print("Copy: %s -> %s" % (source_file, target_file))
-            shutil.copyfile(source_file, target_file, follow_symlinks=True)
-
-
-def copy_files(files, input_dir, subdir, output_dir, outlocal_dir):
+def generate_md5sums_file(input_files: list[str], output_file: str):
+    """
+    Calculate MD5 sums of all input_files and write them to output_file.
+    """
     md5sums = {}
-    if output_dir is not None:
-        os.makedirs(os.path.join(output_dir, subdir), exist_ok=True)
-    if outlocal_dir is not None:
-        os.makedirs(os.path.join(outlocal_dir, subdir), exist_ok=True)
-
-    for f in files:
-        source_file = os.path.join(input_dir, f)
+    for f in input_files:
+        # Tolerate missing files: as in copy_files, a nonexistent file in the
+        # list is skipped instead of causing a failure.
+        if os.path.exists(f):
+            md5sums[os.path.basename(f)] = md5(f)
+
+    output_dir = os.path.dirname(output_file)
+    if output_dir:
+        os.makedirs(output_dir, exist_ok=True)
+    with open(output_file, "w", encoding="utf-8") as f_md5:
+        for f, h in md5sums.items():
+            f_md5.write("{}  {}\n".format(h, f))
+
+
+def copy_files(files: list[str], target_dir: str):
+    """
+    Copy given files to target_dir, creating it if it does not exist. Subfolder
+    hierarchies of the input files are not preserved; only plain files are copied.
+    """
+    if target_dir is None:
+        return
+    os.makedirs(target_dir, exist_ok=True)
+    for source_file in files:
         if os.path.exists(source_file):
-
-            if output_dir is not None:
-                target_file = os.path.join(
-                    output_dir, subdir, os.path.basename(source_file)
-                )
-                print("Copy: %s -> %s" % (source_file, target_file))
-                shutil.copyfile(source_file, target_file, follow_symlinks=True)
-
-            if outlocal_dir is not None:
-                target_file = os.path.join(
-                    outlocal_dir, subdir, os.path.basename(source_file)
-                )
-                print("Copy: %s -> %s" % (source_file, target_file))
-                shutil.copyfile(source_file, target_file, follow_symlinks=True)
-
-            md5sums[os.path.basename(source_file)] = md5(source_file)
+            target_file = os.path.join(target_dir, os.path.basename(source_file))
+            print("Copy: %s -> %s" % (source_file, target_file))
+            shutil.copyfile(source_file, target_file, follow_symlinks=True)
         else:
             print("Missing: " + source_file)
 
-    return md5sums
-
 
 def main():
     parser = argparse.ArgumentParser()
@@ -87,16 +71,6 @@ def main():
         help="""Yocto licenses directory""",
         dest="licenses_dir",
     )
-    parser.add_argument(
-        "--outputdir-upload",
-        help="""Base directory name for uploaded artifacts""",
-        dest="outputdir_upload",
-    )
-    parser.add_argument(
-        "--outputdir-local",
-        help="""Base directory for locally deployed artifacts, should contain absolut path.""",
-        dest="outputdir_local",
-    )
     parser.add_argument(
         "--sdk-dir",
         help="""Yocto sdk directory""",
@@ -107,13 +81,20 @@ def main():
         help="""Documentation directory""",
         dest="doc_dir",
     )
+    parser.add_argument(
+        "--output-dir",
+        help="""Base directory name for output artifacts (can be specified multiple times)""",
+        dest="output_dir",
+        action="append",
+        required=True,
+    )
+    parser.add_argument(
+        "--release-suffix",
+        help="""Suffix to append to the release folder""",
+        dest="release_suffix",
+    )
     args, _ = parser.parse_known_args()
 
-    if args.outputdir_upload is None and args.outputdir_local is None:
-        sys.exit(
-            "ERROR: Either outputdir-local and/or outputdir-upload needs to be specified."
-        )
-
     # Get bitbake variables from testdata.json file
     testdata_files = []
     if args.images_dir is not None:
@@ -121,7 +102,7 @@ def main():
     if args.sdk_dir is not None:
         testdata_files += glob.glob(os.path.join(args.sdk_dir, "*.testdata.json"))
 
-    # Debug stuff
+    # Debug output if no testdata file found
     if not testdata_files:
         if args.images_dir is not None:
             print(args.images_dir)
@@ -133,37 +114,38 @@ def main():
                 print("-- ", f)
         sys.exit("ERROR: no *.testdata.json file found in image or sdk dir.")
 
+    # The required build variables from testdata have the same values for image and SDK
+    # builds, so we just read one of them.
     with open(testdata_files[0], "r", encoding="utf-8") as f:
         buildvars = json.load(f)
 
     machine = buildvars["MACHINE"]
     version = buildvars["DISTRO_VERSION"]
     sdkname = buildvars["TOOLCHAIN_OUTPUTNAME"]
-    artifacts_image = buildvars["DISTRO_IMAGES"].split()
-    artifacts_all = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
-    artifacts_all.append("BUILD_SRCREVS.log")
+    image_artifacts = buildvars["DISTRO_IMAGES"].split()
+    artifacts = buildvars["DISTRO_RELEASE_ARTEFACTS"].split()
+    artifacts.append("BUILD_SRCREVS.log")
 
+    # Set release name
     if version.startswith("fngsystem"):
-        release_name_local = version.replace("fngsystem", "FNGSystem")
-        # outlocal_base = "/artifacts-fngsystem"
+        release_name = version.replace("fngsystem", "FNGSystem")
     else:
-        release_name_local = "Yocto-%s" % version
-        # outlocal_base = "/artifacts-yocto"
+        release_name = "Yocto-%s" % version
+
+    # Append release suffix
+    if args.release_suffix is not None:
+        release_name = release_name + args.release_suffix
 
     # Create output directories
-    if args.outputdir_upload is not None:
-        output_dir = os.path.join(args.outputdir_upload, release_name_local)
-        os.makedirs(output_dir, exist_ok=True)
-    else:
-        output_dir = None
-    if args.outputdir_local is not None:
-        outlocal_dir = os.path.join(args.outputdir_local, release_name_local)
-        os.makedirs(outlocal_dir, exist_ok=True)
-    else:
-        outlocal_dir = None
+    output_dirs = []
+    for output_dir in args.output_dir:
+        full_output_dir = os.path.join(output_dir, release_name)
+        output_dirs.append(full_output_dir)
+        os.makedirs(full_output_dir, exist_ok=True)
 
-    # Convert markdown files into html and package them
+    # Package documentation files
     if args.doc_dir is not None:
+        # Convert markdown to html
         doc_files = glob.glob(os.path.join(args.doc_dir, "*.md"))
         html_files = []
         for f in doc_files:
@@ -171,20 +153,26 @@ def main():
             convertmd2html(f, fout)
             html_files.append(fout)
 
-        doc_md5sums = copy_files(
-            doc_files + html_files, "", "", output_dir, outlocal_dir
-        )
-        write_md5sums_file(doc_md5sums, "", output_dir, outlocal_dir)
+        files = doc_files + html_files
 
+        # Generate MD5 sums file
+        doc_md5sums_file = "md5sums.txt"
+        generate_md5sums_file(files, doc_md5sums_file)
+        files.append(doc_md5sums_file)
+
+        # Copy files
+        for output_dir in output_dirs:
+            copy_files(files, output_dir)
+
+    # Package image files
     if args.images_dir is not None:
         # Add some additional files to the artifacts
-        for artifact in artifacts_image:
-            artifacts_all.append(artifact.split(".")[0] + ".manifest")
-            artifacts_all.append(artifact.split(".")[0] + ".testdata.json")
+        for artifact in image_artifacts:
+            artifacts.append(artifact.split(".")[0] + ".manifest")
+            artifacts.append(artifact.split(".")[0] + ".testdata.json")
 
-        md5sums = copy_files(
-            artifacts_all, args.images_dir, machine, output_dir, outlocal_dir
-        )
+        # Prepend path to artifacts
+        artifacts = [os.path.join(args.images_dir, artifact) for artifact in artifacts]
 
         # If the path for the licenses is set, we check for the list with all
         # licenses. If the list is found, we copy it to the output directory
@@ -194,52 +182,46 @@ def main():
             license_manifest = glob.glob(
                 os.path.join(args.licenses_dir, "**", "license.manifest")
             )
+            # Guard against an empty glob result: a missing license.manifest
+            # previously did not cause a failure
+            if license_manifest:
+                artifacts.append(license_manifest[0])
+
+        # Generate MD5 sums file
+        image_md5sums_file = os.path.join(args.images_dir, "md5sums.txt")
+        generate_md5sums_file(artifacts, image_md5sums_file)
+        artifacts.append(image_md5sums_file)
 
-            if license_manifest:
-                md5sums.update(
-                    copy_files(
-                        ["license.manifest"],
-                        os.path.dirname(license_manifest[0]),
-                        machine,
-                        output_dir,
-                        outlocal_dir,
-                    )
-                )
-                artifacts_all.append("license.manifest")
-
-        # Create md5sums file for build artifacts
-        write_md5sums_file(md5sums, machine, output_dir, outlocal_dir)
-
-    # Generate metadata in case of an image build
-    if args.sdk_dir is None:
+        # Generate metadata file
         generate_metadata(
             machine,
             version,
-            artifacts_all,
+            artifacts,
             sdkname,
-            output_dir,
-            outlocal_dir,
+            os.path.join(args.images_dir, "metainfo.json"),
         )
+        artifacts.append(os.path.join(args.images_dir, "metainfo.json"))
+
+        # Copy files
+        for output_dir in output_dirs:
+            copy_files(artifacts, os.path.join(output_dir, machine))
 
-    # Handle SDK if available
+    # Package SDK
     if args.sdk_dir is not None:
         sdkfiles = glob.glob(os.path.join(args.sdk_dir, sdkname + "*"))
-        print("package_release.py: Upload the following sdk files:")
-        print(sdkfiles)
-        sdk_md5sums = copy_files(
-            sdkfiles, "", os.path.join(machine, "sdk"), None, outlocal_dir
-        )
-        write_md5sums_file(
-            sdk_md5sums, os.path.join(machine, "sdk"), None, outlocal_dir
-        )
-    else:
-        print("package_release.py: sdk_dir is not specified")
+
+        # Generate MD5 sums file
+        sdk_md5sums_file = os.path.join(machine, "sdk", "md5sums.txt")
+        generate_md5sums_file(sdkfiles, sdk_md5sums_file)
+        sdkfiles.append(sdk_md5sums_file)
+
+        # Copy files into the machine/sdk subfolder, as before
+        for output_dir in output_dirs:
+            copy_files(sdkfiles, os.path.join(output_dir, machine, "sdk"))
 
     # Store paths and other variables in environment variable file
     with open("package.env", "w", encoding="utf-8") as env_file:
         env_file.write("VERSION={}\n".format(version))
         env_file.write("MACHINE={}\n".format(machine))
-        env_file.write("LOCALDIR={}\n".format(outlocal_dir))
 
 
 if __name__ == "__main__":
diff --git a/scripts/retrigger_integrating_projects.py b/scripts/retrigger_integrating_projects.py
new file mode 100755
index 0000000..fe631ec
--- /dev/null
+++ b/scripts/retrigger_integrating_projects.py
@@ -0,0 +1,112 @@
+#!/usr/bin/env python3
+import argparse
+import sys
+from gitlab import Gitlab, GitlabJobRetryError
+
+from get_integration_sources import get_integration_sources
+from get_merge_requests import get_merge_requests
+from retrigger_pipeline_jobs import retrigger_pipeline_jobs
+
+
+def main():
+    """
+    Retrigger a given job in all open MRs of projects in the given group with a target
+    branch that is configured for automatic integration into the given manifest project
+    and branch.
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        required=True,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--manifest-project",
+        help="""name of the GitLab manifest project""",
+        dest="manifest_project",
+        required=True,
+    )
+    parser.add_argument(
+        "--manifest-branch",
+        help="""target integration branch""",
+        dest="manifest_branch",
+        required=True,
+    )
+    parser.add_argument(
+        "--group",
+        help="""group path or id to limit search scope to""",
+        dest="group",
+        required=True,
+    )
+    parser.add_argument(
+        "--job",
+        help="""job to retrigger""",
+        dest="job",
+        required=False,
+        default="check",
+    )
+
+    args, _ = parser.parse_known_args()
+
+    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
+    group = gitlab.groups.get(args.group, retry_transient_errors=True)
+
+    integration_sources = get_integration_sources(
+        args.manifest_project, args.manifest_branch, group
+    )
+
+    failed = 0
+
+    for source in integration_sources:
+
+        project = gitlab.projects.get(source["project"], retry_transient_errors=True)
+
+        mrs = get_merge_requests(
+            project,
+            state="opened",
+            target_branch=source["branch"],
+        )
+
+        for mr in mrs:
+            # Get pipeline
+            if not mr.pipeline:
+                print("No pipeline in %s" % mr.web_url)
+                continue
+            pipeline = project.pipelines.get(
+                mr.pipeline.get("id"),
+                retry_transient_errors=True,
+            )
+            try:
+                states = ["success", "running"]
+                jobs = retrigger_pipeline_jobs(
+                    project,
+                    pipeline,
+                    args.job,
+                    states,
+                )
+                if not jobs:
+                    print(
+                        "Could not find any jobs named '%s' with states %s in %s"
+                        % (args.job, states, pipeline.web_url)
+                    )
+            except GitlabJobRetryError as e:
+                print(
+                    "ERROR: Could not retrigger job '%s' in %s: %s"
+                    % (args.job, pipeline.web_url, e)
+                )
+                failed = failed + 1
+                continue
+
+    if failed > 0:
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/retrigger_mr_pipeline_job.py b/scripts/retrigger_mr_pipeline_job.py
deleted file mode 100755
index b4ac682..0000000
--- a/scripts/retrigger_mr_pipeline_job.py
+++ /dev/null
@@ -1,89 +0,0 @@
-#!/usr/bin/env python3
-import common
-
-import argparse
-import sys
-from gitlab import Gitlab, GitlabGetError
-
-
-def retrigger_mr_pipeline_job(project, mr, job_name, status_list):
-    # Get pipeline
-    if not mr.pipeline:
-        print("No pipeline in !%s" % mr.iid)
-        return None
-    pipeline = project.pipelines.get(mr.pipeline.get("id"), retry_transient_errors=True)
-
-    # Find job
-    job = None
-    for pipelinejob in pipeline.jobs.list():
-        if pipelinejob.name == job_name:
-            job = project.jobs.get(pipelinejob.id, retry_transient_errors=True)
-    if not job:
-        print("Could not find job '%s' in pipeline of !%s" % (job_name, mr.iid))
-        return None
-
-    # Only retrigger if job is in certain status
-    if job.status not in status_list:
-        return None
-
-    # Retrigger job
-    job.retry()
-    print("Retrigger job '%s' of pipeline #%s:" % (job_name, pipeline.id))
-    job = project.jobs.get(job.id, retry_transient_errors=True)
-    print(job.web_url)
-
-    return job
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--gitlab-url",
-        help="""URL to the GitLab instance""",
-        dest="gitlab_url",
-        required=True,
-    )
-    parser.add_argument(
-        "--token",
-        help="""GitLab REST API private access token""",
-        dest="token",
-        required=True,
-    )
-    parser.add_argument(
-        "--project",
-        help="""name of the GitLab project""",
-        dest="project",
-        required=True,
-    )
-    parser.add_argument(
-        "--iid",
-        help="""iid of the merge request""",
-        dest="iid",
-        required=True,
-    )
-    parser.add_argument(
-        "--job",
-        help="""name of the job""",
-        dest="job",
-        required=True,
-    )
-
-    args, _ = parser.parse_known_args()
-
-    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
-    project = common.get_project(gitlab, args.project)
-    try:
-        mr = project.mergerequests.get(args.iid, retry_transient_errors=True)
-    except GitlabGetError as e:
-        sys.exit(
-            "ERROR: could not get merge request %s:!%s: %s"
-            % (project.name, args.iid, e)
-        )
-    if not mr:
-        sys.exit("ERROR: could not find merge request %s!%s" % (project.name, args.iid))
-
-    retrigger_mr_pipeline_job(project, mr, args.job, ["success", "running"])
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/retrigger_mr_pipeline_jobs.py b/scripts/retrigger_mr_pipeline_jobs.py
deleted file mode 100755
index 24019bb..0000000
--- a/scripts/retrigger_mr_pipeline_jobs.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python3
-import common
-
-import argparse
-import sys
-from gitlab import Gitlab, GitlabJobRetryError
-
-from get_merge_requests import get_merge_requests
-from retrigger_mr_pipeline_job import retrigger_mr_pipeline_job
-
-
-def main():
-    parser = argparse.ArgumentParser()
-    parser.add_argument(
-        "--gitlab-url",
-        help="""URL to the GitLab instance""",
-        dest="gitlab_url",
-        required=True,
-    )
-    parser.add_argument(
-        "--token",
-        help="""GitLab REST API private access token""",
-        dest="token",
-        required=True,
-    )
-    parser.add_argument(
-        "--project",
-        help="""name of the GitLab project""",
-        dest="project",
-        required=True,
-    )
-    parser.add_argument(
-        "--state",
-        help="""state of the merge request (opened, closed, locked, or merged)""",
-        dest="state",
-        required=True,
-    )
-    parser.add_argument(
-        "--source-branch",
-        help="""source branch of the merge request""",
-        dest="source_branch",
-        required=False,
-    )
-    parser.add_argument(
-        "--target-branch",
-        help="""target branch of the merge request""",
-        dest="target_branch",
-        required=False,
-    )
-    parser.add_argument(
-        "--commit",
-        help="""commit sha of the merge request""",
-        dest="commit",
-        required=False,
-    )
-    parser.add_argument(
-        "--job",
-        help="""job to retrigger""",
-        dest="job",
-        required=True,
-    )
-
-    args, _ = parser.parse_known_args()
-
-    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
-    project = common.get_project(gitlab, args.project)
-
-    mrs = get_merge_requests(
-        project,
-        state=args.state,
-        source_branch=args.source_branch,
-        target_branch=args.target_branch,
-        commit=args.commit,
-    )
-
-    failed = 0
-    for mr in mrs:
-        try:
-            retrigger_mr_pipeline_job(project, mr, args.job, ["success", "running"])
-        except GitlabJobRetryError as e:
-            print(
-                "ERROR: Could not retrigger job '%s' of %s!%s: %s"
-                % (args.job, args.project, mr.iid, e)
-            )
-            failed = failed + 1
-            continue
-
-    if failed > 0:
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    main()
diff --git a/scripts/retrigger_pipeline_jobs.py b/scripts/retrigger_pipeline_jobs.py
new file mode 100755
index 0000000..11b5258
--- /dev/null
+++ b/scripts/retrigger_pipeline_jobs.py
@@ -0,0 +1,126 @@
+#!/usr/bin/env python3
+import common
+
+import argparse
+import sys
+from gitlab import Gitlab, GitlabGetError
+from gitlab.v4.objects import Project, ProjectPipeline
+from typing import List
+
+
+def retrigger_pipeline_jobs(
+    project: Project,
+    pipeline: ProjectPipeline,
+    job_name: str,
+    status_list: List[str],
+    include_children: bool = False,
+):
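+    """
+    Retrigger all jobs named job_name whose status is in status_list in the given
+    pipeline and, if include_children is set, in its child pipelines belonging to
+    the same project. Return the list of retriggered jobs.
+    """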
+    jobs = []
+
+    # Recurse to child pipelines
+    if include_children:
+        for bridge in pipeline.bridges.list():
+            if (
+                bridge is not None
+                and bridge.downstream_pipeline is not None
+                and bridge.downstream_pipeline["project_id"] == project.id
+            ):
+                child_pipeline = project.pipelines.get(
+                    bridge.downstream_pipeline["id"],
+                    retry_transient_errors=True,
+                )
+                jobs += retrigger_pipeline_jobs(
+                    project,
+                    child_pipeline,
+                    job_name,
+                    status_list,
+                    include_children=True,
+                )
+
+    # Find job
+    job = None
+    for pipelinejob in pipeline.jobs.list():
+        if pipelinejob.name == job_name:
+            job = project.jobs.get(pipelinejob.id, retry_transient_errors=True)
+    if not job:
+        return jobs
+
+    # Only retrigger if job is in certain status
+    if job.status not in status_list:
+        return jobs
+
+    # Retrigger job
+    job.retry()
+    print("Retrigger job '%s' in %s:" % (job_name, pipeline.web_url))
+    job = project.jobs.get(job.id, retry_transient_errors=True)
+    print(job.web_url)
+    jobs.append(job)
+
+    return jobs
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        required=True,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        help="""name of the GitLab project""",
+        dest="project",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        help="""pipeline id""",
+        dest="pipeline",
+        required=True,
+    )
+    parser.add_argument(
+        "--job",
+        help="""name of the job""",
+        dest="job",
+        required=True,
+    )
+
+    parser.add_argument(
+        "--include-children",
+        help="""search for job in child pipelines""",
+        dest="include_children",
+        action="store_true",
+        default=False,
+    )
+
+    args, _ = parser.parse_known_args()
+
+    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
+    project = common.get_project(gitlab, args.project)
+    try:
+        pipeline = project.pipelines.get(args.pipeline, retry_transient_errors=True)
+    except GitlabGetError as e:
+        sys.exit("ERROR: could not get pipeline %s: %s" % (args.pipeline, e))
+    if not pipeline:
+        sys.exit("ERROR: could not find pipeline %s" % args.pipeline)
+
+    jobs = retrigger_pipeline_jobs(
+        project,
+        pipeline,
+        args.job,
+        ["success", "running"],
+        args.include_children,
+    )
+
+    print("Retriggered %d jobs for pipeline #%s" % (len(jobs), args.pipeline))
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scripts/submit_test.py b/scripts/submit_test.py
index ba32a51..84b34c3 100755
--- a/scripts/submit_test.py
+++ b/scripts/submit_test.py
@@ -10,6 +10,8 @@ import time
 
 import junitparser
 
+from lava_create_testreport import lava_create_testreport
+
 TESTS_GIT_URL = "git@gitlab.com:garz-fricke/tests/development-tests.git"
 
 
@@ -38,7 +40,6 @@ def call(cmd, stdout=None):
 
 
 def main():
-
     parser = argparse.ArgumentParser()
     parser.add_argument(
         "--verbose",
@@ -97,12 +98,18 @@ def main():
     )
     parser.add_argument(
         "--test-plan",
-        help="""Name pattern of the test plan to use, file needs to be 
+        help="""Name pattern of the test plan to use, file needs to be
                 found in the 'tests' subfolder of the test repo and may use
                 {platform} to be replaced by the actual platform name.""",
         default="{platform}.jinja2",
         dest="test_suite",
     )
+    parser.add_argument(
+        "--report-name",
+        help="""Name of the generated markdown result. If omnitted, report is generation is skipped.""",
+        default=None,
+        dest="report_name",
+    )
     # TODO add parameters to specify branch or revision,
     # but this is needed to be implemented in the tests also.
 
@@ -259,6 +266,13 @@ def main():
             )
         )
 
+    # Create report as Markdown
+    if args.report_name is not None:
+        filename = os.path.join(args.results_path, args.report_name)
+        os.makedirs(os.path.dirname(filename), exist_ok=True)
+        with open(filename, "w", encoding="utf-8") as f:
+            f.write(lava_create_testreport(jobs))
+
     if passed:
         sys.exit(0)
 
diff --git a/scripts/update_submodule.py b/scripts/update_submodule.py
index d22b47d..ea8f5a9 100755
--- a/scripts/update_submodule.py
+++ b/scripts/update_submodule.py
@@ -85,7 +85,7 @@ def clone_project_and_submodule(project: Project, submodule_name, branch=None):
 
     # Checkout project
     try:
-        repo = Repo.clone_from(clone_url.url, project_dir, branch=branch, depth=1)
+        repo = Repo.clone_from(clone_url.url, project_dir.name, branch=branch, depth=1)
     except GitCommandError as e:
         sys.exit("ERROR: could not clone repository\n" + str(e))
     except IndexError:
@@ -130,7 +130,9 @@ def clone_project_and_submodule(project: Project, submodule_name, branch=None):
     with submodule.config_writer() as writer:
         writer.set("url", submodule_relative_url)
 
-    return repo, submodule_project
+    # Return the TemporaryDirectory object project_dir as well: the cloned repo lives
+    # inside it, and the directory is deleted as soon as the object is garbage
+    # collected, so the caller must keep a reference while using the repo.
+    return repo, submodule_project, project_dir
 
 
 def update_submodule_in_repo(repo: Repo, submodule_project: Project, new_revision):
@@ -209,6 +211,7 @@ def update_submodule_and_include_ref(
 
     submodule_update_needed = True
     project_repo = None
+    project_dir = None
     integration_commit = None
 
     if branch is None:
@@ -233,7 +236,7 @@ def update_submodule_and_include_ref(
             "No commits found in range %s, probably submodule already up-to-date.",
             revision_range,
         )
-        return None, None, None, None
+        return None, None, None, None, None
     logging.debug("New commits: %s", commits)
 
     # Find out if top commit is part of a merge request
@@ -248,7 +251,7 @@ def update_submodule_and_include_ref(
 
     # Construct integration branch name
     integration_branch_name = common.integration_branch_name(
-        submodule_project.name, integration_branch_suffix
+        submodule_project.name, integration_branch_suffix, branch
     )
 
     # Construct commit message
@@ -328,7 +331,7 @@ def update_submodule_and_include_ref(
             clone_branch = integration_branch_name
 
         # Actually clone
-        project_repo, submodule_project = clone_project_and_submodule(
+        project_repo, submodule_project, project_dir = clone_project_and_submodule(
             project, submodule_name, clone_branch
         )
 
@@ -383,14 +386,19 @@ def update_submodule_and_include_ref(
             integration_commit = common.commit_and_push(
                 project,
                 project_repo,
-                integration_branch_name,
                 message,
                 gitlab.user.username,
                 gitlab.user.email,
                 less_verbose=True,
             )
 
-    return project_repo, integration_branch_name, integration_commit, message
+    return (
+        project_repo,
+        project_dir,
+        integration_branch_name,
+        integration_commit,
+        message,
+    )
 
 
 def main():
-- 
GitLab