From 61eb3e11ff3da4568285272cd932dbef0debff7d Mon Sep 17 00:00:00 2001
From: Tim Jaacks <tim.jaacks@seco.com>
Date: Mon, 28 Aug 2023 13:04:43 +0200
Subject: [PATCH] Switch all strings to formatted string literals (f-strings)

The old printf-style string formatting syntax using '%' is not
recommended anymore. See for reference:
https://docs.python.org/3/library/stdtypes.html#printf-style-string-formatting
https://docs.sourcery.ai/Reference/Python/Default-Rules/replace-interpolation-with-fstring/

Python 3.6 introduced f-strings as a better alternative:
https://docs.python.org/3/reference/lexical_analysis.html#f-strings

Consequently switch to this new syntax for all strings.
---
 scripts/.sourcery.yaml                        |  3 -
 scripts/accept_merge_request.py               | 14 ++--
 scripts/alphaplan_fwr_import.py               | 12 +--
 scripts/alphaplan_keys.py                     | 53 +++++--------
 scripts/analyze_job_logs.py                   |  6 +-
 scripts/cancel_pipelines.py                   |  4 +-
 scripts/changelog_generator.py                | 31 +++-----
 ...eck_if_integration_branch_is_up_to_date.py | 34 ++++-----
 scripts/check_yaml_value_length.py            | 11 +--
 scripts/common.py                             | 76 ++++++++-----------
 scripts/create_merge_request.py               |  4 +-
 scripts/delete_job_artifacts.py               |  5 +-
 scripts/deploy_gitlab_ci.py                   | 65 +++++++---------
 scripts/download_job_artifacts.py             | 10 +--
 scripts/generate_alphaplan_fwr_file.py        | 34 ++++-----
 scripts/generate_release_metadata.py          |  9 +--
 scripts/get_current_revision_from_manifest.py |  9 +--
 scripts/get_integration_sources.py            | 10 +--
 scripts/get_merge_requests.py                 |  3 +-
 scripts/gitlab_backup.py                      | 49 +++++-------
 scripts/integrate_into_manifest.py            | 34 ++++-----
 scripts/lava_api.py                           | 44 +++++------
 scripts/lava_create_testreport.py             | 15 ++--
 scripts/lava_credentials.py                   | 10 +--
 scripts/markdown_generator.py                 | 22 +++---
 scripts/merge_into_manifest.py                | 13 ++--
 scripts/mirror_mr_pipeline.py                 | 13 ++--
 scripts/package_release.py                    | 26 +++----
 scripts/pylintrc                              | 11 ++-
 scripts/render_jinja2_template.py             |  6 +-
 scripts/retrigger_integrating_projects.py     |  9 +--
 scripts/retrigger_pipeline_jobs.py            |  8 +-
 scripts/submit_test.py                        | 34 ++++-----
 scripts/update_submodule.py                   | 58 +++++++-------
 34 files changed, 321 insertions(+), 424 deletions(-)

diff --git a/scripts/.sourcery.yaml b/scripts/.sourcery.yaml
index b5c068f1..b369b009 100644
--- a/scripts/.sourcery.yaml
+++ b/scripts/.sourcery.yaml
@@ -12,9 +12,6 @@ rule_settings:
   - remove-zero-from-range
   # FIXME: verify if we want to keep the checks below.
   # If not, remove them. If yes, move them above this comment.
-  - use-fstring-for-formatting
-  - replace-interpolation-with-fstring
-  - use-fstring-for-concatenation
   - low-code-quality
   - for-append-to-extend
   - raise-specific-error
diff --git a/scripts/accept_merge_request.py b/scripts/accept_merge_request.py
index 5411e916..b7547db4 100755
--- a/scripts/accept_merge_request.py
+++ b/scripts/accept_merge_request.py
@@ -30,7 +30,7 @@ def accept_merge_request(project, mr, rebase=False, should_remove_source_branch=
             )
             mr = updated_mr
         except GitlabGetError as e:
-            print("WARNING: Could not update merge request object: %s" % e)
+            print(f"WARNING: Could not update merge request object: {e}")
 
         # Try to merge the merge request
         try:
@@ -41,15 +41,15 @@ def accept_merge_request(project, mr, rebase=False, should_remove_source_branch=
                 merged = True
             else:
                 if mr.merge_error:
-                    print("Merge error: %s" % mr.merge_error)
+                    print(f"Merge error: {mr.merge_error}")
                 else:
-                    print("Merge reported success, but MR state is '%s'" % mr.state)
+                    print(f"Merge reported success, but MR state is '{mr.state}'")
                 return False, mr.sha
 
         except GitlabMRClosedError as e:
             # See HTTP error codes for merge requests here:
             # https://docs.gitlab.com/ce/api/merge_requests.html#accept-mr
-            logging.debug("Error from gitlab: %d", e.response_code)
+            logging.debug(f"Error from gitlab: {e.response_code}")
 
             if e.response_code == 405:
                 # Not allowed (draft, closed, pipeline pending or failed)
@@ -94,14 +94,14 @@ def accept_merge_request(project, mr, rebase=False, should_remove_source_branch=
                 print("Trying to rebase...")
                 mr = common.rebase_merge_request(project, mr)
                 if mr.merge_error:
-                    print("ERROR: rebase not possible\n'%s'" % mr.merge_error)
+                    print(f"ERROR: rebase not possible\n'{mr.merge_error}'")
                     sys.exit(critical_error)
                 print("Sucessfully rebased")
 
             else:
                 if pipeline_pending:
                     print("")
-                print("ERROR: merge not possible: %s" % e)
+                print(f"ERROR: merge not possible: {e}")
                 sys.exit(critical_error)
 
     return True, mr.sha
@@ -151,7 +151,7 @@ def main():
             id=args.merge_request, retry_transient_errors=True
         )
     except GitlabGetError as e:
-        sys.exit("Could not get merge request: %s" % e)
+        sys.exit(f"Could not get merge request: {e}")
 
     if accept_merge_request(project, merge_request, rebase=args.rebase):
         print("Successfully merged")
diff --git a/scripts/alphaplan_fwr_import.py b/scripts/alphaplan_fwr_import.py
index 93a95d8d..88433345 100755
--- a/scripts/alphaplan_fwr_import.py
+++ b/scripts/alphaplan_fwr_import.py
@@ -14,7 +14,7 @@ def ap_send_json(jsonobj: dict, url: str, user: str, password: str, cert_file: s
             url, json=jsonobj, auth=(user, password), verify=cert_file, timeout=10
         )
     except requests.exceptions.RequestException as e:
-        sys.exit("ERROR: %s" % e)
+        sys.exit(f"ERROR: {e}")
 
     try:
         msg_json = msg.json()
@@ -24,7 +24,7 @@ def ap_send_json(jsonobj: dict, url: str, user: str, password: str, cert_file: s
     if msg_json["Status"] != "Ok":
         sys.exit("ERROR: AlphaPlan webservice post request failed")
 
-    print("AlphaPlan webservice response: {}".format(msg_json["Meldung"]))
+    print(f'AlphaPlan webservice response: {msg_json["Meldung"]}')
 
 
 def main():
@@ -63,18 +63,18 @@ def main():
 
     files = glob.glob(args.file, recursive=True)
     if not files:
-        sys.exit("ERROR: no file(s) matching '%s' found" % args.file)
+        sys.exit(f"ERROR: no file(s) matching '{args.file}' found")
 
-    print("Sending data to Alphaplan FWR webservice at %s" % args.url)
+    print(f"Sending data to Alphaplan FWR webservice at {args.url}")
 
     # Get files from passed glob
     for filename in files:
-        print("Importing JSON file %s" % filename)
+        print(f"Importing JSON file {filename}")
         with open(filename, "r", encoding="utf-8") as f:
             try:
                 json_data = json.load(f)
             except json.decoder.JSONDecodeError:
-                sys.exit("ERROR: Could not parse JSON data from %f" % filename)
+                sys.exit(f"ERROR: Could not parse JSON data from {filename}")
 
             ap_send_json(
                 json_data,
diff --git a/scripts/alphaplan_keys.py b/scripts/alphaplan_keys.py
index 33052801..c643b88c 100755
--- a/scripts/alphaplan_keys.py
+++ b/scripts/alphaplan_keys.py
@@ -36,8 +36,8 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.YOCTO_PKG_PY: {
             ApSubKeys.MATCH: "pkg.py",
             ApSubKeys.MATCHCODE: "FNGUpdate",
-            ApSubKeys.BEZEICHNUNG: "{} Flash-N-Go Update general pkg.py update"
-            "script for nonverbose fng-install.sh".format(machine_ap),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} Flash-N-Go Update general pkg.py "
+            "update script for nonverbose fng-install.sh",
             ApSubKeys.LANGTEXT: "To be used with packages the contain an "
             "fng-install.sh.\n"
             "* with --nonverbose mode (new output)\n"
@@ -49,19 +49,15 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.YOCTO_FNG_INSTALL: {
             ApSubKeys.MATCH: "fng-install.sh",
             ApSubKeys.MATCHCODE: "InstallScript",
-            ApSubKeys.BEZEICHNUNG: "{} {} Install Script".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Install Script",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "US",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
         },
         ApKeys.YOCTO_FS: {
-            ApSubKeys.MATCH: "{}.tar.gz".format(machine),
+            ApSubKeys.MATCH: f"{machine}.tar.gz",
             ApSubKeys.MATCHCODE: "OS-Filesystem",
-            ApSubKeys.BEZEICHNUNG: "{} {} Filesystem".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Filesystem",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "FS",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -69,9 +65,7 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_UPDATE: {
             ApSubKeys.MATCH: "fngsystem-self-update.sh",
             ApSubKeys.MATCHCODE: "TFTP",
-            ApSubKeys.BEZEICHNUNG: "{} {} Self Update".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Self Update",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "TFTP",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -79,29 +73,23 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_INIT: {
             ApSubKeys.MATCH: "fngsystem-self-init.sh",
             ApSubKeys.MATCHCODE: "InstallScript",
-            ApSubKeys.BEZEICHNUNG: "{} {} Init Script".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Init Script",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "Updateskript",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
         },
         ApKeys.FNGSYS_FS: {
-            ApSubKeys.MATCH: "{}.tgz".format(machine),
+            ApSubKeys.MATCH: f"{machine}.tgz",
             ApSubKeys.MATCHCODE: "FS",
-            ApSubKeys.BEZEICHNUNG: "{} {} Filesystem".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Filesystem",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "FS",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
         },
         ApKeys.FNGSYS_CHECKSUM: {
-            ApSubKeys.MATCH: "{}.md5".format(machine),
+            ApSubKeys.MATCH: f"{machine}.md5",
             ApSubKeys.MATCHCODE: "TFTP",
-            ApSubKeys.BEZEICHNUNG: "{} {} Checksum".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} {release_name_local} Checksum",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "TFTP",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -109,9 +97,8 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_UBOOT_UPDATE: {
             ApSubKeys.MATCH: "fng-install-uboot.sh",
             ApSubKeys.MATCHCODE: "US",
-            ApSubKeys.BEZEICHNUNG: "{} U-Boot {} Update script".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} U-Boot {release_name_local} "
+            "Update script",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "Updateskript",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -119,9 +106,8 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_UBOOT_IMAGE: {
             ApSubKeys.MATCH: "imx-boot",
             ApSubKeys.MATCHCODE: "FS",
-            ApSubKeys.BEZEICHNUNG: "{} U-Boot {} Bootloader Image".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} U-Boot {release_name_local} "
+            "Bootloader Image",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "FS",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -129,9 +115,8 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_UBOOT_IMAGETAR: {
             ApSubKeys.MATCH: "imx-boot.tar.gz",
             ApSubKeys.MATCHCODE: "FS",
-            ApSubKeys.BEZEICHNUNG: "{} U-Boot {} Bootloader Image".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} U-Boot {release_name_local} "
+            "Bootloader Image",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "FS",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
@@ -139,9 +124,7 @@ def get_ap_dict(machine, machine_ap, release_name_local):
         ApKeys.FNGSYS_UBOOT_CHECKSUM: {
             ApSubKeys.MATCH: "imx-boot.md5",
             ApSubKeys.MATCHCODE: "TFTP",
-            ApSubKeys.BEZEICHNUNG: "{} U-Boot {} Checksum".format(
-                machine_ap, release_name_local
-            ),
+            ApSubKeys.BEZEICHNUNG: f"{machine_ap} U-Boot {release_name_local} Checksum",
             ApSubKeys.LANGTEXT: "",
             ApSubKeys.TYP: "TFTP",
             ApSubKeys.ATTRIBUTESET: "Firmware, Bestandteil eines SW-Paketes",
diff --git a/scripts/analyze_job_logs.py b/scripts/analyze_job_logs.py
index 4872146d..67644ec7 100755
--- a/scripts/analyze_job_logs.py
+++ b/scripts/analyze_job_logs.py
@@ -99,11 +99,7 @@ def main(args):
         log = bytes.decode(job.trace())
         if options.pattern in log:
             logging.debug(
-                "Found %s: %s %s. Total %d",
-                job.name,
-                job.status,
-                str(job.tag_list),
-                len(jobs),
+                f"Found {job.name}: {job.status} {str(job.tag_list)}. Total {len(jobs)}"
             )
             jobs.append(job)
             print(
diff --git a/scripts/cancel_pipelines.py b/scripts/cancel_pipelines.py
index 30f0117c..b861b48c 100755
--- a/scripts/cancel_pipelines.py
+++ b/scripts/cancel_pipelines.py
@@ -110,8 +110,8 @@ def main():
     project = common.get_project(gitlab, args.project)
 
     print(
-        "Searching for pipelines in project '%s' on ref '%s' with IDs below %d"
-        % (args.project, args.ref, args.below_pipeline_id)
+        f"Searching for pipelines in project '{args.project}' on ref '{args.ref}' "
+        f"with IDs below {args.below_pipeline_id}"
     )
 
     cancelled_pipelines = cancel_pipelines(project, args.ref, args.below_pipeline_id)
diff --git a/scripts/changelog_generator.py b/scripts/changelog_generator.py
index 5afce6ad..fdb56a98 100755
--- a/scripts/changelog_generator.py
+++ b/scripts/changelog_generator.py
@@ -43,12 +43,10 @@ class Project:
         self.project = project
 
     def __str__(self):
-        return "## Project " + self.project.name + "\n"
+        return f"## Project {self.project.name}\n"
 
     def withlink(self):
-        return (
-            "\n\n## Project [" + self.project.name + "](" + self.project.web_url + ")\n"
-        )
+        return f"\n\n## Project [{self.project.name}]({self.project.web_url})\n"
 
     def __eq__(self, p):
         return self.project.id == p.project.id if p else False
@@ -74,10 +72,10 @@ class Tag:
         with V5 Api: https://docs.gitlab.com/ee/api/commits.html#list-merge-requests-associated-with-a-commit
         """
         self.mergerequest = None
-        logging.debug(self.name + "  -- " + self.commit["id"])
+        logging.debug(f"{self.name}  -- {self.commit['id']}")
 
     def __str__(self):
-        return self.name + " " + self.timestamp.strftime(TIMEFORMAT)
+        return f"{self.name} {self.timestamp.strftime(TIMEFORMAT)}"
 
     def add_mergerequest(self, m):
         if self.mergerequest:
@@ -89,18 +87,15 @@ class Tag:
             # as the commit, so the merged_at date is relevant. Otherwise the tagged commit and may be
             # more end up in the wrong release
             new_timestamp = decode_timestamp(self.mergerequest.mr.merged_at)
-            logging.debug("Found matching merge request for %s", self)
-            logging.debug(" - %s", self.timestamp.strftime(TIMEFORMAT))
-            logging.debug(" - %s", new_timestamp.strftime(TIMEFORMAT))
+            logging.debug(f"Found matching merge request for {self}")
+            logging.debug(f" - {self.timestamp.strftime(TIMEFORMAT)}")
+            logging.debug(f" - {new_timestamp.strftime(TIMEFORMAT)}")
             self.timestamp = new_timestamp
 
     def header(self):
         return (
-            "\n\n\n# Release "
-            + self.name
-            + "\n\nreleased at "
-            + self.timestamp.strftime(TIMEFORMAT)
-            + "\n\n"
+            f"\n\n\n# Release {self.name}"
+            f"\n\nreleased at {self.timestamp.strftime(TIMEFORMAT)}\n\n"
         )
 
 
@@ -113,7 +108,7 @@ class DummyTag:
         self.timestamp = date
 
     def header(self):
-        return "\n\n\n# " + self.name + "\n\n"
+        return f"\n\n\n# {self.name}\n\n"
 
     def add_mergerequest(self, m):
         # Needed as interface but does nothing
@@ -183,7 +178,7 @@ class MergeRequest:
         return self.mr.title
 
     def withlink(self):
-        return self.mr.title + " [" + self.mr.reference + "](" + self.mr.web_url + ")"
+        return f"{self.mr.title} [{self.mr.reference}]({self.mr.web_url})"
 
 
 def main(args):
@@ -243,9 +238,7 @@ def main(args):
         Release(
             DummyTag(
                 "Not yet released",
-                "Merge Requests already merged into "
-                + options.branch
-                + " but not yet released.",
+                f"Merge Requests already merged into {options.branch} but not yet released.",
             )
         )
     )
diff --git a/scripts/check_if_integration_branch_is_up_to_date.py b/scripts/check_if_integration_branch_is_up_to_date.py
index 009048a8..7e58a451 100755
--- a/scripts/check_if_integration_branch_is_up_to_date.py
+++ b/scripts/check_if_integration_branch_is_up_to_date.py
@@ -20,9 +20,8 @@ def check_if_integration_branch_is_up_to_date(
         )
     except GitlabGetError:
         sys.exit(
-            "ERROR: could not find integration branch {} in {}.".format(
-                integration_branch_name, target_project.name
-            )
+            f"ERROR: could not find integration branch {integration_branch_name} in "
+            f"{target_project.name}."
         )
 
     try:
@@ -31,9 +30,8 @@ def check_if_integration_branch_is_up_to_date(
         )
     except GitlabGetError:
         sys.exit(
-            "ERROR: could not find target branch {} in {}.".format(
-                target_branch_name, target_project.name
-            )
+            f"ERROR: could not find target branch {target_branch_name} in "
+            f"{target_project.name}."
         )
 
     # Loop over the commits until the integration_branch head id is found
@@ -101,9 +99,7 @@ def main():
     source_project = common.get_project(gitlab, args.source_project)
     merge_request = common.get_merge_request(source_project, args.merge_request)
     if merge_request is None:
-        sys.exit(
-            "ERROR: could not get %s  %s" % (source_project.name, args.merge_request)
-        )
+        sys.exit(f"ERROR: could not get {source_project.name}  {args.merge_request}")
 
     integration_branch_name = common.integration_branch_name(
         source_project.path, merge_request.source_branch, args.target_branch
@@ -115,21 +111,17 @@ def main():
         integration_branch_name=integration_branch_name,
     ):
         print(
-            "Integration branch {} in {} is up to date.".format(
-                integration_branch_name, target_project.name
-            )
+            f"Integration branch {integration_branch_name} in {target_project.name} "
+            f"is up to date."
         )
     else:
         sys.exit(
-            "Integration branch {} in {} is not up to date.\n"
-            "Please re-run the MR pipeline:\n"
-            "  1. Open the MR pipelines page:\n"
-            "     {}\n"
-            "  2. Click 'Run Pipeline'".format(
-                integration_branch_name,
-                target_project.name,
-                merge_request.web_url + "/pipelines",
-            )
+            f"Integration branch {integration_branch_name} in {target_project.name} "
+            f"is not up to date.\n"
+            f"Please re-run the MR pipeline:\n"
+            f"  1. Open the MR pipelines page:\n"
+            f"     {merge_request.web_url}/pipelines\n"
+            f"  2. Click 'Run Pipeline'"
         )
 
 
diff --git a/scripts/check_yaml_value_length.py b/scripts/check_yaml_value_length.py
index 1384e207..eff30760 100755
--- a/scripts/check_yaml_value_length.py
+++ b/scripts/check_yaml_value_length.py
@@ -31,14 +31,9 @@ def check_value_length(filename: str, data: OrderedDict, keys: list[str], limit:
             if count > limit:
                 exceeded = True
                 print(
-                    colors.fg.purple
-                    + filename
-                    + ":"
-                    + colors.fg.green
-                    + str(data[key].lc.line)
-                    + colors.reset
-                    + " value of '%s' exceeds character limit (%d > %d)"
-                    % (key, count, limit)
+                    f"{colors.fg.purple}{filename}:{colors.fg.green}"
+                    f"{str(data[key].lc.line)}{colors.reset}"
+                    f" value of '{key}' exceeds character limit ({count} > {limit})"
                 )
         if type(value) is CommentedMap:
             exceeded = check_value_length(filename, value, keys, limit) | exceeded
diff --git a/scripts/common.py b/scripts/common.py
index 040f1a5c..4dd99c4f 100755
--- a/scripts/common.py
+++ b/scripts/common.py
@@ -22,14 +22,7 @@ PENDING_STATES = ["created", "waiting_for_resource", "preparing", "pending", "ru
 
 def integration_branch_name(project_name, source_branch_name, target_branch_name):
     """Get integration branch name"""
-    return (
-        "integrate/"
-        + project_name.lower()
-        + "/"
-        + source_branch_name
-        + "/into/"
-        + target_branch_name
-    )
+    return f"integrate/{project_name.lower()}/{source_branch_name}/into/{target_branch_name}"
 
 
 def is_gitlab_ci_integration_branch(branch_name):
@@ -47,19 +40,19 @@ def find_gitlab_ci_integration_branch(repo: Repo, branch_name):
     if not is_gitlab_ci_integration_branch(branch_name):
         return None
 
-    logging.debug("Integration of gitlab-ci: %s", branch_name)
+    logging.debug(f"Integration of gitlab-ci: {branch_name}")
     for ref in repo.references:
         refname = ref.name
-        logging.debug("Found ref: %s", refname)
+        logging.debug(f"Found ref: {refname}")
         if not refname.startswith("origin/"):
             continue
         # remove 'origin/' from the ref before compare
         refname = ref.name.split("/", 1)[1]
-        logging.debug("Splitted refname: %s", refname)
+        logging.debug(f"Splitted refname: {refname}")
         if branch_name == refname:
             integration_branch = refname
             logging.debug(
-                "Found integration branch for gitlab-ci: %s", integration_branch
+                f"Found integration branch for gitlab-ci: {integration_branch}"
             )
             return integration_branch
 
@@ -81,7 +74,7 @@ def get_project(gitlab, project_name):
                 if p.name == project_name:
                     project = p
         if not project:
-            sys.exit("ERROR: project '%s' not found" % project_name)
+            sys.exit(f"ERROR: project '{project_name}' not found")
     except requests.ConnectionError:
         sys.exit("ERROR: could not connect to GitLab server")
     except GitlabAuthenticationError:
@@ -95,13 +88,10 @@ def get_latest_commit(project, branch_name):
         branch = project.branches.get(branch_name, retry_transient_errors=True)
     except GitlabGetError as e:
         sys.exit(
-            "ERROR: could not get branch '%s' for project '%s': %s"
-            % (branch_name, project.name, e)
+            f"ERROR: could not get branch '{branch_name}' for project '{project.name}': {e}"
         )
     if not branch:
-        sys.exit(
-            "ERROR: branch '%s' not found in project %s" % (branch_name, project.name)
-        )
+        sys.exit(f"ERROR: branch '{branch_name}' not found in project {project.name}")
     return branch.commit
 
 
@@ -112,7 +102,7 @@ def rebase_merge_request(project, merge_request):
     try:
         merge_request.rebase()
     except GitlabMRRebaseError as e:
-        merge_request.merge_error = "Could not rebase merge request: %s" % e
+        merge_request.merge_error = f"Could not rebase merge request: {e}"
         return merge_request
     rebase_in_progress = True
     while rebase_in_progress:
@@ -124,7 +114,7 @@ def rebase_merge_request(project, merge_request):
                 retry_transient_errors=True,
             )
         except GitlabGetError as e:
-            merge_request.merge_error = "Could not get updated merge request: %s" % e
+            merge_request.merge_error = f"Could not get updated merge request: {e}"
             return merge_request
         rebase_in_progress = updated_merge_request.rebase_in_progress
     return updated_merge_request
@@ -132,11 +122,9 @@ def rebase_merge_request(project, merge_request):
 
 def crosslink_merge_requests(source_mr: MergeRequest, integration_mr: MergeRequest):
     """Insert cross-links in merge requests"""
-    integration_mr.notes.create(
-        {"body": "Source merge request: %s" % source_mr.web_url}
-    )
+    integration_mr.notes.create({"body": f"Source merge request: {source_mr.web_url}"})
     source_mr.notes.create(
-        {"body": "Integration merge request: %s" % integration_mr.web_url}
+        {"body": f"Integration merge request: {integration_mr.web_url}"}
     )
 
 
@@ -149,14 +137,13 @@ def wait_until_merge_status_is_set(project: Project, mr: MergeRequest):
         print(".", end="", flush=True)
         time.sleep(1)
         mr = project.mergerequests.get(mr.iid, retry_transient_errors=True)
-    print(" -> %s" % mr.merge_status)
+    print(f" -> {mr.merge_status}")
 
 
 def list_commits(commits):
     """Create a list of commits along with the commit messages"""
     return "".join(
-        "\n--\n\nCommit: %s\n\n%s" % (commit.web_url, commit.message)
-        for commit in commits
+        f"\n--\n\nCommit: {commit.web_url}\n\n{commit.message}" for commit in commits
     )
 
 
@@ -178,21 +165,20 @@ def commit_and_push(
     # Push commit
     try:
         origin = repo.remote("origin")
-        logging.debug("Push branch %s to %s", branch, origin)
+        logging.debug(f"Push branch {branch} to {origin}")
         origin.push(branch, force=True).raise_if_error()
     except GitCommandError as e:
-        sys.exit("ERROR: could not push branch %s to %s\n" % (branch, origin) + str(e))
+        sys.exit(f"ERROR: could not push branch {branch} to {origin}\n{e}")
 
     # Print commit information
     revision = repo.head.commit.hexsha
     print("Pushed new commit:")
-    print(project.web_url + "/-/commit/" + revision)
+    print(f"{project.web_url}/-/commit/{revision}")
     if not less_verbose:
         print(
-            colors.fg.lightgrey
-            + repo.git.show("--summary", "--decorate")
-            + colors.reset
-            + "\n"
+            f"{colors.fg.lightgrey}"
+            f"{repo.git.show('--summary', '--decorate')}"
+            f"{colors.reset}\n"
         )
 
     return revision
@@ -205,7 +191,7 @@ def get_submodule(repo: Repo, submodule_name):
         if sm.name == submodule_name:
             submodule = sm
     if submodule is None:
-        sys.exit("ERROR: submodule '%s' not found" % submodule_name)
+        sys.exit(f"ERROR: submodule '{submodule_name}' not found")
     return submodule
 
 
@@ -228,7 +214,7 @@ def get_merge_request(project: Project, merge_request):
     # seco-ne/yocto/infrastructure/ci-test/minimal-bar!115
     if "!" in merge_request:
         merge_request = int(merge_request.split("!")[-1])
-        logging.debug("Number of MR: %d", merge_request)
+        logging.debug(f"Number of MR: {merge_request}")
     try:
         mr = project.mergerequests.get(merge_request, retry_transient_errors=True)
     except GitlabGetError:
@@ -252,16 +238,16 @@ def clone_project(project: Project, into, branch=None):
     try:
         repo = Repo.clone_from(clone_url.url, into, branch=branch, depth=1)
     except GitCommandError as e:
-        raise Exception("could not clone repository\n" + str(e)) from e
+        raise Exception(f"could not clone repository\n{e}") from e
     except IndexError as e:
-        raise Exception("branch '%s' not found" % branch) from e
+        raise Exception(f"branch '{branch}' not found") from e
     return repo
 
 
 def get_repository_file_raw(project: Project, filename, ref=None):
     # TODO tree objects are not supported
     fileobj = get_repository_file_obj(project, filename, ref)
-    logging.debug("Read file '%s' from '%s' at ref %s", filename, project.name, ref)
+    logging.debug(f"Read file '{filename}' from '{project.name}' at ref {ref}")
     return project.repository_raw_blob(
         fileobj["id"], retry_transient_errors=True
     ).decode()
@@ -271,7 +257,7 @@ def get_repository_file_obj(project: Project, filename, ref=None):
     # TODO tree objects are not supported
     if ref is None:
         ref = project.default_branch
-        logging.debug("Using default branch %s", ref)
+        logging.debug(f"Using default branch {ref}")
 
     repository_tree = project.repository_tree(
         ref=ref, all=True, retry_transient_errors=True
@@ -280,7 +266,7 @@ def get_repository_file_obj(project: Project, filename, ref=None):
     fileobj = [f for f in repository_tree if f["name"] == filename]
 
     if not fileobj:
-        logging.error("Could not find file %s", filename)
+        logging.error(f"Could not find file {filename}")
         for f in repository_tree:
             logging.debug(f["name"])
         return None
@@ -299,7 +285,7 @@ def is_commit_parent_of_project_commit(
         _ = project.commits.get(commit, retry_transient_errors=True)
     except GitlabGetError as e:
         raise Exception(
-            "Failed to find commit {} in {}".format(project_commit, project.name)
+            f"Failed to find commit {project_commit} in {project.name}"
         ) from e
 
     # Loop over the parent commits until commit is found
@@ -308,18 +294,18 @@ def is_commit_parent_of_project_commit(
     while True:
         count = count + 1
         if limit is not None and count >= limit:
-            logging.debug("Check %d commits and did not found a match.", count)
+            logging.debug(f"Check {count} commits and did not found a match.")
             return False
 
         try:
             parent = project.commits.get(parent_id, retry_transient_errors=True)
         except GitlabGetError as e:
             raise Exception(
-                "Failed to find commit {} in {}".format(parent_id, project.name)
+                f"Failed to find commit {parent_id} in {project.name}"
             ) from e
 
         # The integration branch is up to date if its parent is the integration base
-        logging.debug("Compare '%s' and '%s'", parent.id, commit)
+        logging.debug(f"Compare '{parent.id}' and '{commit}'")
         if parent.id == commit:
             return True
         if len(parent.parent_ids) == 0:
diff --git a/scripts/create_merge_request.py b/scripts/create_merge_request.py
index 20f86c7b..b7d3cb95 100755
--- a/scripts/create_merge_request.py
+++ b/scripts/create_merge_request.py
@@ -25,11 +25,11 @@ def create_merge_request(project: Project, source_branch, target_branch):
     try:
         sb = project.branches.get(source_branch, retry_transient_errors=True)
     except GitlabGetError:
-        sys.exit("ERROR: source branch '%s' does not exist." % source_branch)
+        sys.exit(f"ERROR: source branch '{source_branch}' does not exist.")
     try:
         project.branches.get(target_branch, retry_transient_errors=True)
     except GitlabGetError:
-        sys.exit("ERROR: target branch '%s' does not exist." % target_branch)
+        sys.exit(f"ERROR: target branch '{target_branch}' does not exist.")
 
     # Get commit of source branch
     commit = project.commits.get(sb.attributes["commit"]["id"])
diff --git a/scripts/delete_job_artifacts.py b/scripts/delete_job_artifacts.py
index 3aa7dc67..413499cf 100755
--- a/scripts/delete_job_artifacts.py
+++ b/scripts/delete_job_artifacts.py
@@ -91,10 +91,7 @@ def main(args):
             continue
         job.delete_artifacts()
         logging.debug(
-            "Deleted artifacts for %s: %s %s.",
-            job.name,
-            job.status,
-            str(job.tag_list),
+            f"Deleted artifacts for {job.name}: {job.status} {str(job.tag_list)}."
         )
 
 
diff --git a/scripts/deploy_gitlab_ci.py b/scripts/deploy_gitlab_ci.py
index fc85fa02..bfdb0ba1 100755
--- a/scripts/deploy_gitlab_ci.py
+++ b/scripts/deploy_gitlab_ci.py
@@ -22,11 +22,11 @@ def read_keys_from_gitlab_ci_yml(gitlab_ci_yml):
     # Read values from existing file
     yaml = YAML()
     data = yaml.load(gitlab_ci_yml)
-    logging.debug("Yaml: %s", data)
+    logging.debug(f"Yaml: {data}")
 
     try:
         recipe = data["variables"]["BB_RECIPE_NAME"]
-        logging.debug("Recipe %s", recipe)
+        logging.debug(f"Recipe {recipe}")
     except KeyError:
         recipe = None
     return {"recipe": recipe}
@@ -72,9 +72,7 @@ def integrate_submodule_into(
         "message": message,
     }
     logging.debug(
-        "Integration branch: %s (%s)",
-        integration_branch_name,
-        integration_commit,
+        f"Integration branch: {integration_branch_name} ({integration_commit})"
     )
     return ret
 
@@ -91,9 +89,9 @@ def create_integration_merge_request(
     if created:
         if source_mr is not None:
             common.crosslink_merge_requests(source_mr, mr)
-        print("Created new merge request:\n%s" % mr.web_url)
+        print(f"Created new merge request:\n{mr.web_url}")
     else:
-        print("Existing integration merge request:\n%s" % mr.web_url)
+        print(f"Existing integration merge request:\n{mr.web_url}")
     return mr
 
 
@@ -205,8 +203,8 @@ def main():
     all_integration_sources = []
     for manifest_branch in manifest_branches:
         print(
-            "Searching for projects in %s that are configured for automatic integration into %s:%s"
-            % (args.group, args.manifest_project, manifest_branch)
+            f"Searching for projects in {args.group} that are configured for automatic "
+            f"integration into {args.manifest_project}:{manifest_branch}"
         )
         integration_sources[manifest_branch] = get_integration_sources(
             args.manifest_project, manifest_branch, group
@@ -218,7 +216,7 @@ def main():
     # Update submodule in all integration sources
     project_integrations = []
     for s in all_integration_sources:
-        print("Create integration commit in %s:%s" % (s["project"], s["branch"]))
+        print(f'Create integration commit in {s["project"]}:{s["branch"]}')
 
         integration = integrate_submodule_into(
             gitlab, s["project"], args.submodule, args.revision, s["branch"]
@@ -231,10 +229,7 @@ def main():
     # Update submodule in all manifest branches
     manifest_integrations = []
     for manifest_branch in manifest_branches:
-        print(
-            "Create integration commit in %s:%s"
-            % (args.manifest_project, manifest_branch),
-        )
+        print(f"Create integration commit in {args.manifest_project}:{manifest_branch}")
         manifest_integrations.append(
             integrate_submodule_into(
                 gitlab,
@@ -261,13 +256,12 @@ def main():
         )
         if not mrs:
             sys.exit(
-                "ERROR: could not determine source merge request for commit %s"
-                % args.revision
+                f"ERROR: could not determine source merge request for commit {args.revision}"
             )
         source_mr = mrs[0]
 
         for project_integration in project_integrations:
-            logging.debug("Create MR in %s", project_integration["project"].name)
+            logging.debug(f"Create MR in {project_integration['project'].name}")
             mr = create_integration_merge_request(
                 project_integration["project"],
                 project_integration["integration_branch"],
@@ -275,7 +269,7 @@ def main():
                 source_mr,
             )
             # Now merge
-            logging.debug("Merge %s!%s", project_integration["project"], mr.iid)
+            logging.debug(f"Merge {project_integration['project']}!{mr.iid}")
 
             # Wait until GitLab has checked merge status
             common.wait_until_merge_status_is_set(project_integration["project"], mr)
@@ -292,11 +286,10 @@ def main():
 
             if not merged:
                 sys.exit(
-                    "Integration MR could not be merged:\n"
-                    "%s\n"
-                    "This can probably be resolved by creating a new commit in "
-                    "gitlab-ci and merging it. The above MR can be closed then."
-                    % mr.web_url
+                    f"Integration MR could not be merged:\n"
+                    f"{mr.web_url}\n"
+                    f"This can probably be resolved by creating a new commit in "
+                    f"gitlab-ci and merging it. The above MR can be closed then."
                 )
 
     # =======================================================
@@ -307,14 +300,14 @@ def main():
         manifest_file_abs = os.path.join(
             manifest_integration["repo"].working_tree_dir, args.manifest_file
         )
-        logging.debug("Read manifest from: %s", manifest_file_abs)
+        logging.debug(f"Read manifest from: {manifest_file_abs}")
         with open(manifest_file_abs, "r", encoding="utf8") as fp:
             manifest = fp.read()
         logging.debug(manifest)
         srcrev_file_abs = os.path.join(
             manifest_integration["repo"].working_tree_dir, args.srcrev_file
         )
-        logging.debug("Read manifest from: %s", srcrev_file_abs)
+        logging.debug(f"Read manifest from: {srcrev_file_abs}")
         with open(srcrev_file_abs, "r", encoding="utf8") as fp:
             srcrev = fp.read()
         logging.debug(srcrev)
@@ -328,9 +321,8 @@ def main():
                     and source["branch"] == project_integration["master_branch"]
                 ):
                     logging.debug(
-                        "Update %s to %s",
-                        project_integration["project"].name,
-                        project_integration["commit"],
+                        f"Update {project_integration['project'].name} "
+                        f"to {project_integration['commit']}"
                     )
 
                     new_manifest = update_manifest(
@@ -362,8 +354,8 @@ def main():
                         logging.debug(srcrev)
                     else:
                         logging.debug(
-                            "Project %s not found in xml or srcrev file",
-                            project_integration["project"],
+                            f"Project {project_integration['project']} not found in "
+                            f"xml or srcrev file"
                         )
 
         # Write manifest
@@ -408,7 +400,7 @@ def main():
     # Create merge requests for the manifest
     # ============================================
     for integration in manifest_integrations:
-        logging.debug("Create MR in %s", integration["project"].name)
+        logging.debug(f"Create MR in {integration['project'].name}")
         mr = create_integration_merge_request(
             integration["project"],
             integration["integration_branch"],
@@ -419,7 +411,7 @@ def main():
         # Now merge it
         # =================================================
         # The manifest needs to be merged at last
-        logging.debug("Merge %s!%s", args.manifest_project, mr.iid)
+        logging.debug(f"Merge {args.manifest_project}!{mr.iid}")
 
         # Wait until GitLab has checked merge status
         common.wait_until_merge_status_is_set(integration["project"], mr)
@@ -429,11 +421,10 @@ def main():
 
         if not merged:
             sys.exit(
-                "Integration MR could not be merged:\n"
-                "%s\n"
-                "This can probably be resolved by creating a new commit in "
-                "gitlab-ci and merging it. The above MR can be closed then."
-                % mr.web_url
+                f"Integration MR could not be merged:\n"
+                f"{mr.web_url}\n"
+                f"This can probably be resolved by creating a new commit in "
+                f"gitlab-ci and merging it. The above MR can be closed then."
             )
 
         print("Successfully merged")
diff --git a/scripts/download_job_artifacts.py b/scripts/download_job_artifacts.py
index 007ba5f5..566a4258 100755
--- a/scripts/download_job_artifacts.py
+++ b/scripts/download_job_artifacts.py
@@ -148,7 +148,7 @@ def main(args):
             filename = download_job_artifacts(
                 gitlab, destination, options.job, options.project, extract=True
             )
-            print("Downloaded artifacts for job {} to {}".format(options.job, filename))
+            print(f"Downloaded artifacts for job {options.job} to {filename}")
         else:
             if options.destination is None:
                 destination = tempfile.TemporaryDirectory().name
@@ -158,13 +158,9 @@ def main(args):
             filename = download_job_artifact(
                 gitlab, destination, options.path, options.job, options.project
             )
-            print(
-                "Downloaded {} for job {} to {}".format(
-                    options.path, options.job, filename
-                )
-            )
+            print(f"Downloaded {options.path} for job {options.job} to {filename}")
     except gl.exceptions.GitlabGetError as e:
-        exit("ERROR: %s" % e)
+        exit(f"ERROR: {e}")
 
 
 if __name__ == "__main__":
diff --git a/scripts/generate_alphaplan_fwr_file.py b/scripts/generate_alphaplan_fwr_file.py
index 96af50c0..3b20ad5a 100755
--- a/scripts/generate_alphaplan_fwr_file.py
+++ b/scripts/generate_alphaplan_fwr_file.py
@@ -14,7 +14,7 @@ def ap_id_generator():
     if not hasattr(ap_id_generator, "value"):
         ap_id_generator.value = 0
     ap_id_generator.value += 1
-    return "Neu{}".format(ap_id_generator.value)
+    return f"Neu{ap_id_generator.value}"
 
 
 def new_ap_article(
@@ -91,7 +91,7 @@ def generate_ap_subarticle(
             break
 
     if ap_article is None:
-        sys.exit("ERROR: Can not find key:{} in files".format(ap_key))
+        sys.exit(f"ERROR: Can not find key:{ap_key} in files")
 
     return ap_article
 
@@ -138,8 +138,8 @@ def generate_fwr_articles(
         data = new_ap_article(
             "FWR",
             "SW-Paket",
-            "{} {}".format(machine_ap, release_name_ap),
-            "{}\n{}".format(release_name_ap, machine_ap),
+            f"{machine_ap} {release_name_ap}",
+            f"{release_name_ap}\n{machine_ap}",
             attb_set_major,
             attribute,
             stueckliste=stueckliste,
@@ -176,19 +176,18 @@ def generate_fwr_articles(
             data_uboot = new_ap_article(
                 "FWR",
                 "SW-Paket",
-                "{} U-Boot {}".format(machine_ap, release_name_ap),
-                "{}\n{}".format(release_name_ap, machine_ap),
+                f"{machine_ap} U-Boot {release_name_ap}",
+                f"{release_name_ap}\n{machine_ap}",
                 attb_set_major,
                 attribute_uboot,
                 stueckliste=stueckliste_uboot,
             )
 
             # Create an additional import file for uboot
-            jsonfile_uboot_name = "alphaplan-import-uboot-{}.json".format(machine)
+            jsonfile_uboot_name = f"alphaplan-import-uboot-{machine}.json"
             print(
-                'Saving Alphaplan data for FWR article "{} U-Boot {}" to {}'.format(
-                    machine_ap, release_name_ap, jsonfile_uboot_name
-                )
+                f'Saving Alphaplan data for FWR article "{machine_ap} U-Boot '
+                f'{release_name_ap}" to {jsonfile_uboot_name}'
             )
             with open(jsonfile_uboot_name, "w", encoding="utf-8") as jsonfile:
                 json.dump(data_uboot, jsonfile, indent=2)
@@ -211,18 +210,17 @@ def generate_fwr_articles(
         data = new_ap_article(
             "FWR",
             "SW-Paket",
-            "{} {}".format(machine_ap, release_name_ap),
-            "{}\n{}".format(release_name_ap, machine_ap),
+            f"{machine_ap} {release_name_ap}",
+            f"{release_name_ap}\n{machine_ap}",
             attb_set_major,
             attribute,
             stueckliste=stueckliste,
         )
 
-    jsonfile_name = "alphaplan-import-{}.json".format(machine)
+    jsonfile_name = f"alphaplan-import-{machine}.json"
     print(
-        'Saving Alphaplan data for FWR article "{} {}" to {}'.format(
-            machine_ap, release_name_ap, jsonfile_name
-        )
+        f'Saving Alphaplan data for FWR article "{machine_ap} {release_name_ap}" '
+        f"to {jsonfile_name}"
     )
     with open(jsonfile_name, "w", encoding="utf-8") as jsonfile:
         json.dump(data, jsonfile, indent=2)
@@ -265,14 +263,14 @@ def main():
     # Read file list
     files = []
     for files_file in glob.glob(args.files_list, recursive=True):
-        print("Reading files from %s" % files_file)
+        print(f"Reading files from {files_file}")
         with open(files_file, "r", encoding="utf-8") as f:
             files = files + f.read().splitlines()
 
     # Read md5 sums
     md5sums = {}
     for md5sums_file in glob.glob(args.md5sums, recursive=True):
-        print("Reading md5sums from %s" % md5sums_file)
+        print(f"Reading md5sums from {md5sums_file}")
         with open(md5sums_file, "r", encoding="utf-8") as f:
             for line in f:
                 # Assuming line format: "<md5sum>  <filename>\n"
diff --git a/scripts/generate_release_metadata.py b/scripts/generate_release_metadata.py
index a732c036..449a4527 100755
--- a/scripts/generate_release_metadata.py
+++ b/scripts/generate_release_metadata.py
@@ -26,9 +26,9 @@ def generate_metadata(
             install_script = filename
         elif filename == "license.manifest":
             licenses = filename
-        elif filename.endswith(machine + ".tar.gz"):
+        elif filename.endswith(f"{machine}.tar.gz"):
             image_general = filename
-        elif filename.endswith(machine + ".wic"):
+        elif filename.endswith(f"{machine}.wic"):
             image_wic = filename
 
     metadata = {
@@ -60,10 +60,7 @@ def generate_metadata(
         metadata["files"].append(new_file)
 
     if sdk is not None:
-        new_file = {
-            "name": "SDK",
-            "path": "sdk/" + sdk + ".sh",
-        }
+        new_file = {"name": "SDK", "path": f"sdk/{sdk}.sh"}
         metadata["files"].append(new_file)
 
     if licenses is not None:
diff --git a/scripts/get_current_revision_from_manifest.py b/scripts/get_current_revision_from_manifest.py
index c234a7ec..a6222fb2 100755
--- a/scripts/get_current_revision_from_manifest.py
+++ b/scripts/get_current_revision_from_manifest.py
@@ -25,7 +25,7 @@ def get_current_revision_from_manifest(
 
     if manifest_branch is None:
         manifest_branch = manifest_project.default_branch
-        logging.debug("Using default branch %s", manifest_project)
+        logging.debug(f"Using default branch {manifest_project}")
 
     # Get all manifest xml
     repository_tree = manifest_project.repository_tree(
@@ -46,7 +46,7 @@ def get_current_revision_from_manifest(
             manifest_project, file_name, ref=manifest_branch
         )
         if content is None:
-            logging.error("Failed to read %s.", file_name)
+            logging.error(f"Failed to read {file_name}.")
             continue
         logging.debug(content)
         revision = None
@@ -147,9 +147,8 @@ def main():
         srcrev_file=args.srcrev_file,
     )
     print(
-        "The manifest repo {} includes the project {} at revision {}".format(
-            manifest_project.name, project.name, current_revisions
-        )
+        f"The manifest repo {manifest_project.name} includes the project "
+        f"{project.name} at revision {current_revisions}"
     )
 
 
diff --git a/scripts/get_integration_sources.py b/scripts/get_integration_sources.py
index f4552ebf..1aab0689 100755
--- a/scripts/get_integration_sources.py
+++ b/scripts/get_integration_sources.py
@@ -49,11 +49,9 @@ def get_integration_sources(manifest_project: str, manifest_branch: str, group:
                 pass
             elif e.response_code == 403:  # forbidden
                 sys.exit(
-                    (
-                        "ERROR: could not get INTEGRATION variable of project %s\n"
-                        % project.path_with_namespace
-                    )
-                    + e.error_message
+                    f"ERROR: could not get INTEGRATION variable of project "
+                    f"{project.path_with_namespace}\n"
+                    f"{e.error_message}"
                 )
             else:
                 raise
@@ -106,7 +104,7 @@ def main():
     )
 
     for source in integration_sources:
-        print("%s:%s" % (source["project"], source["branch"]))
+        print(f"{source['project']}:{source['branch']}")
 
 
 if __name__ == "__main__":
diff --git a/scripts/get_merge_requests.py b/scripts/get_merge_requests.py
index 17eae3c1..64ace1e4 100755
--- a/scripts/get_merge_requests.py
+++ b/scripts/get_merge_requests.py
@@ -22,8 +22,7 @@ def get_merge_requests(
         )
     except GitlabGetError as e:
         sys.exit(
-            "ERROR: could not list merge requests for project '%s': %s"
-            % (project.name, e)
+            f"ERROR: could not list merge requests for project '{project.name}': {e}"
         )
     if commit:
         for mr in all_merge_requests:
diff --git a/scripts/gitlab_backup.py b/scripts/gitlab_backup.py
index 4ff7f0c6..2907bfc5 100755
--- a/scripts/gitlab_backup.py
+++ b/scripts/gitlab_backup.py
@@ -38,12 +38,10 @@ class Project:
         self.project = project
 
     def __str__(self):
-        return "## Project " + self.project.name + "\n"
+        return f"## Project {self.project.name}\n"
 
     def withlink(self):
-        return (
-            "\n\n## Project [" + self.project.name + "](" + self.project.web_url + ")\n"
-        )
+        return f"\n\n## Project [{self.project.name}]({self.project.web_url})\n"
 
     def __eq__(self, p):
         return self.project.id == p.project.id if p else False
@@ -69,10 +67,10 @@ class Tag:
         with V5 Api: https://docs.gitlab.com/ee/api/commits.html#list-merge-requests-associated-with-a-commit
         """
         self.mergerequest = None
-        logging.debug(self.name + "  -- " + self.commit["id"])
+        logging.debug(f"{self.name}  -- {self.commit['id']}")
 
     def __str__(self):
-        return self.name + " " + self.timestamp.strftime(TIMEFORMAT)
+        return f"{self.name} {self.timestamp.strftime(TIMEFORMAT)}"
 
     def add_mergerequest(self, m):
         if self.mergerequest:
@@ -84,18 +82,15 @@ class Tag:
             # as the commit, so the merged_at date is relevant. Otherwise the tagged commit and may be
             # more end up in the wrong release
             new_timestamp = decode_timestamp(self.mergerequest.mr.merged_at)
-            logging.debug("Found matching merge request for %s", self)
-            logging.debug(" - %s", self.timestamp.strftime(TIMEFORMAT))
-            logging.debug(" - %s", new_timestamp.strftime(TIMEFORMAT))
+            logging.debug(f"Found matching merge request for {self}")
+            logging.debug(f" - {self.timestamp.strftime(TIMEFORMAT)}")
+            logging.debug(f" - {new_timestamp.strftime(TIMEFORMAT)}")
             self.timestamp = new_timestamp
 
     def header(self):
         return (
-            "\n\n\n# Release "
-            + self.name
-            + "\n\nreleased at "
-            + self.timestamp.strftime(TIMEFORMAT)
-            + "\n\n"
+            f"\n\n\n# Release {self.name}"
+            f"\n\nreleased at {self.timestamp.strftime(TIMEFORMAT)}\n\n"
         )
 
 
@@ -108,7 +103,7 @@ class DummyTag:
         self.timestamp = date
 
     def header(self):
-        return "\n\n\n# " + self.name + "\n\n"
+        return f"\n\n\n# {self.name}\n\n"
 
     def add_mergerequest(self, m):
         # Needed as interface but does nothing
@@ -178,7 +173,7 @@ class MergeRequest:
         return self.mr.title
 
     def withlink(self):
-        return self.mr.title + " [" + self.mr.reference + "](" + self.mr.web_url + ")"
+        return f"{self.mr.title} [{self.mr.reference}]({self.mr.web_url})"
 
 
 def get_projects_from_group(gitlab, base_group):
@@ -245,13 +240,13 @@ def main(args):
     gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
     group = gitlab.groups.get(options.groupid)
 
-    print("Getting projects in group {}".format(group.name))
+    print(f"Getting projects in group {group.name}")
 
     projects = get_projects_from_group(gitlab, group)
     p = {}
     for project in projects:
         exportpath = (
-            os.path.join(options.exportpath, project.path_with_namespace) + ".tar.gz"
+            f"{os.path.join(options.exportpath, project.path_with_namespace)}.tar.gz"
         )
         os.makedirs(os.path.dirname(exportpath), exist_ok=True)
         exportobject = project.exports.create(data=None)
@@ -261,7 +256,7 @@ def main(args):
             "export": exportobject,
             "downloaded": False,
         }
-        print("Triggered creation of export for {}".format(project.name))
+        print(f"Triggered creation of export for {project.name}")
 
     while True:
         in_started_state = 0
@@ -276,36 +271,30 @@ def main(args):
             p[project]["laststatus"] = status
 
             if status != "finished":
-                logging.debug("Project export status for %s: %s", project, status)
+                logging.debug(f"Project export status for {project}: {status}")
                 if status in ["started", "queued", "regeneration_in_progress"]:
                     in_started_state += 1
                 continue
 
-            print("Downloading export for {}".format(project))
+            print(f"Downloading export for {project}")
             exportpath = p[project]["exportpath"]
             with open(exportpath, "w+b") as f:
                 f.write(export.download())
             p[project]["downloaded"] = True
         if in_started_state == 0:
             break
-        print("Waiting for exports to be finished ({})".format(in_started_state))
+        print(f"Waiting for exports to be finished ({in_started_state})")
 
         time.sleep(5)
 
     for project in p:
         if p[project]["downloaded"]:
-            print(
-                "Project {} was downloaded to {}".format(
-                    project, p[project]["exportpath"]
-                )
-            )
+            print(f"Project {project} was downloaded to {p[project]['exportpath']}")
 
     for project in p:
         if not p[project]["downloaded"]:
             print(
-                "Project {}: export failed with status {}".format(
-                    project, p[project]["laststatus"]
-                )
+                f"Project {project}: export failed with status {p[project]['laststatus']}"
             )
 
 
diff --git a/scripts/integrate_into_manifest.py b/scripts/integrate_into_manifest.py
index 8f6e0ab5..f7fb60dc 100755
--- a/scripts/integrate_into_manifest.py
+++ b/scripts/integrate_into_manifest.py
@@ -39,7 +39,7 @@ def update_manifest(manifest, project: Project, new_revision):
 
     # Get current project revision from manifest
     old_revision = project_node.get("revision")
-    logging.debug("Replace %s with %s", old_revision, new_revision)
+    logging.debug(f"Replace {old_revision} with {new_revision}")
 
     # Update manifest file
     # We are doing this using a plain text replace action. Unfortunately
@@ -93,15 +93,15 @@ def integrate_into_manifest(
 
         # Checkout manifest
         # TODO replace checkout with gitlab api access
-        print("Cloning manifest repo: %s" % manifest_project.http_url_to_repo)
+        print(f"Cloning manifest repo: {manifest_project.http_url_to_repo}")
         try:
             manifest_repo = Repo.clone_from(
                 clone_url.url, manifest_dir, branch=manifest_branch
             )
         except GitCommandError as e:
-            sys.exit("ERROR: could not clone manifest repository\n" + str(e))
+            sys.exit(f"ERROR: could not clone manifest repository\n{e}")
         except IndexError:
-            sys.exit("ERROR: branch '%s' not found" % manifest_branch)
+            sys.exit(f"ERROR: branch '{manifest_branch}' not found")
 
         # Special handling for the gitlab-ci integration
         # When the branch 'merge_request.source_branch' already starts with
@@ -113,12 +113,12 @@ def integrate_into_manifest(
 
         if integration_branch is not None:
             manifest_repo.git.checkout(
-                "-b", integration_branch, "origin/{}".format(integration_branch)
+                "-b", integration_branch, f"origin/{integration_branch}"
             )
-            logging.debug("Heads: %s", manifest_repo.heads)
+            logging.debug(f"Heads: {manifest_repo.heads}")
             manifest_repo.heads[integration_branch].checkout()
             logging.debug(manifest_repo.git.log("--oneline", "-n", "5"))
-            print("Using existing integration branch: %s" % integration_branch)
+            print(f"Using existing integration branch: {integration_branch}")
         else:
             # Create integration branch (delete former one if already exists)
             integration_branch = common.integration_branch_name(
@@ -128,7 +128,7 @@ def integrate_into_manifest(
                 if integration_branch == ref.name:
                     manifest_repo.delete_head(ref)
 
-            print("Creating integration branch: %s" % integration_branch)
+            print(f"Creating integration branch: {integration_branch}")
             manifest_repo.head.set_reference(
                 manifest_repo.create_head(integration_branch)
             )
@@ -140,7 +140,7 @@ def integrate_into_manifest(
             with open(manifest_filepath.as_posix(), "r", encoding="utf8") as fp:
                 manifest = fp.read()
         except FileNotFoundError:
-            sys.exit("ERROR: file '%s' not found in manifest repo" % manifest_file)
+            sys.exit(f"ERROR: file '{manifest_file}' not found in manifest repo")
 
         new_manifest = update_manifest(manifest, project, new_revision)
         if new_manifest is not None:
@@ -152,8 +152,8 @@ def integrate_into_manifest(
             # Look for project in SRCREV as it has not been found in the manifest
             if recipe_name is None:
                 sys.exit(
-                    "ERROR: project '%s' not found in manifest and "
-                    "no recipe name is specified" % project.path
+                    f"ERROR: project '{project.path}' not found in manifest and "
+                    f"no recipe name is specified"
                 )
 
             with open(srcrev_filepath, "r", encoding="utf8") as fp:
@@ -162,8 +162,8 @@ def integrate_into_manifest(
             # write file
             if new_srcrev is None:
                 sys.exit(
-                    "ERROR: project '%s' not found in manifest and "
-                    "no recipe name is specified" % project.path
+                    f"ERROR: project '{project.path}' not found in manifest and "
+                    f"no recipe name is specified"
                 )
             with open(srcrev_filepath.as_posix(), "w", encoding="utf8") as fp:
                 fp.write(new_srcrev)
@@ -185,7 +185,7 @@ def integrate_into_manifest(
             gitlab.user.username,
             gitlab.user.email,
         )
-        logging.debug("New revision in manifest: %s", manifest_revision)
+        logging.debug(f"New revision in manifest: {manifest_revision}")
 
         return manifest_revision
 
@@ -272,12 +272,12 @@ def main():
 
     manifest_project = common.get_project(gitlab, args.manifest_project)
     project = common.get_project(gitlab, args.project)
-    logging.debug("Project: %s", project.name)
-    logging.debug("Merge Request: %s", args.merge_request)
+    logging.debug(f"Project: {project.name}")
+    logging.debug(f"Merge Request: {args.merge_request}")
 
     merge_request = common.get_merge_request(project, args.merge_request)
     if merge_request is None:
-        sys.exit("ERROR: could not get %s  %s" % (project.name, args.merge_request))
+        sys.exit(f"ERROR: could not get {project.name}  {args.merge_request}")
 
     manifest_revision = integrate_into_manifest(
         manifest_project=manifest_project,
diff --git a/scripts/lava_api.py b/scripts/lava_api.py
index e66f0d27..d80d209e 100755
--- a/scripts/lava_api.py
+++ b/scripts/lava_api.py
@@ -30,44 +30,42 @@ class Lava:
     def plain_request(self, url):
         response = requests.get(
             url=url,
-            headers={"Authorization": "Token %s" % self._token},
+            headers={"Authorization": f"Token {self._token}"},
         )
 
         # Handle errors
         if not response.ok:
-            logging.error("Response from lava %d", response.status_code)
+            logging.error(f"Response from lava {response.status_code}")
             return response.ok, None
-        logging.debug("Response from lava %d", response.status_code)
+        logging.debug(f"Response from lava {response.status_code}")
 
         return response.ok, response.text
 
     def request(self, suburl):
         # Get test results via LAVA REST API
-        ok, text = self.plain_request(
-            "http://{host}/api/v0.2/{suburl}".format(host=self._host, suburl=suburl)
-        )
+        ok, text = self.plain_request(f"http://{self._host}/api/v0.2/{suburl}")
         return ok, text
 
     def request_json(self, suburl):
         ok, text = self.request(suburl)
         if not ok:
-            raise LavaException("Failed to query lava suburl: %s" % suburl)
+            raise LavaException(f"Failed to query lava suburl: {suburl}")
 
         return json.loads(text)
 
     def get_job_suites(self, jobid):
-        return self.request_json("jobs/{}/suites/".format(jobid))
+        return self.request_json(f"jobs/{jobid}/suites/")
 
     def get_job_details(self, jobid):
-        return self.request_json("jobs/{}/".format(jobid))
+        return self.request_json(f"jobs/{jobid}/")
 
     def get_test_results(self, jobid):
-        return self.request_json("jobs/{}/tests/".format(jobid))
+        return self.request_json(f"jobs/{jobid}/tests/")
 
     def get_test_log(self, jobid):
-        ok, content = self.request("jobs/{}/logs/".format(jobid))
+        ok, content = self.request(f"jobs/{jobid}/logs/")
         if not ok:
-            raise LavaException("Failed to query lava for test log of job %d" % jobid)
+            raise LavaException(f"Failed to query lava for test log of job {jobid}")
 
         # print(content)
         yaml = YAML(typ="safe")
@@ -77,10 +75,10 @@ class Lava:
         return self.request_json("devices/")
 
     def get_device(self, name):
-        return self.request_json("devices/{}/".format(name))
+        return self.request_json(f"devices/{name}/")
 
     def get_devicetypes(self, name):
-        return self.request_json("devicetypes/{}".format(name))
+        return self.request_json(f"devicetypes/{name}")
 
     def get_tag_list(self):
         return self.request_json("/tags/")
@@ -146,7 +144,7 @@ class LavaTest:
         return self.timestamp < other.timestamp
 
     def __str__(self):
-        return self.name + ": " + str(self.result)
+        return f"{self.name}: {str(self.result)}"
 
     def _search_log_for_test(self, log, log_end):
         """Loops reversely over the log and tries to find the relevant lines
@@ -189,12 +187,12 @@ class LavaTest:
 
             # Search for the lava test description
             if line["lvl"] == "target" and line["msg"].startswith(
-                "<TESTCASE_DESCRIPTION_END {}>".format(self.test_case_id)
+                f"<TESTCASE_DESCRIPTION_END {self.test_case_id}>"
             ):
                 description_end = line_no
 
             if line["lvl"] == "target" and line["msg"].startswith(
-                "<TESTCASE_DESCRIPTION_START {}>".format(self.test_case_id)
+                f"<TESTCASE_DESCRIPTION_START {self.test_case_id}>"
             ):
                 description_start = line_no
 
@@ -275,7 +273,7 @@ class LavaDevice:
         }
 
     def __str__(self):
-        return "LavaDevice " + self.name
+        return f"LavaDevice {self.name}"
 
 
 class LavaTag:
@@ -287,7 +285,7 @@ class LavaTag:
         self.id = data["id"]
 
     def __str__(self):
-        return "LavaTag " + self.name
+        return f"LavaTag {self.name}"
 
     @property
     def is_serial(self):
@@ -325,7 +323,7 @@ class LavaSuite:
                 lava_test = LavaTest(self, test, joblog)
                 self.tests.append(lava_test)
             except LavaException as e:
-                logging.debug("%s: %s", str(e), test["name"])
+                logging.debug(f"{e}: {test['name']}")
 
         self.tests.sort()
 
@@ -348,7 +346,7 @@ class LavaSuite:
         return self.tests[0] < other.tests[0]
 
     def __str__(self):
-        return "TestSuite " + self.name
+        return f"TestSuite {self.name}"
 
 
 class LavaJob:
@@ -385,7 +383,7 @@ class LavaJob:
 
         if not self.ok:
             logging.warning(
-                "The job %d is not finished or it infrastructure problems", jobid
+                f"The job {jobid} is not finished or it infrastructure problems"
             )
             return
 
@@ -397,7 +395,7 @@ class LavaJob:
                 lava_suite = LavaSuite(self, suite, self._test_log)
                 self.suites.append(lava_suite)
             except LavaException as e:
-                logging.debug("%s: %s", str(e), suite["name"])
+                logging.debug(f"{e}: {suite['name']}")
 
         self.suites.sort()
 
diff --git a/scripts/lava_create_testreport.py b/scripts/lava_create_testreport.py
index adaeb857..fdb903a6 100755
--- a/scripts/lava_create_testreport.py
+++ b/scripts/lava_create_testreport.py
@@ -62,7 +62,7 @@ class TestCollection:
             return "Passed"
         if count == 0:
             return "Failed"
-        return "Partly failed ({} of {})".format(len(tests) - count, len(tests))
+        return f"Partly failed ({len(tests) - count} of {len(tests)})"
 
     @staticmethod
     def collections_from_suites(suite_list):
@@ -79,7 +79,7 @@ class TestCollection:
         return [TestCollection(k, v) for k, v in tests.items()]
 
     def __str__(self):
-        return "TestCollection " + self.name
+        return f"TestCollection {self.name}"
 
 
 class SuiteCollection:
@@ -114,7 +114,7 @@ class SuiteCollection:
             suite_col = c.name
             for t in c.test_collections:
                 row = table.get_row()
-                row.add_content([suite_col, "[{}](#{})".format(t.name, t.test_case_id)])
+                row.add_content([suite_col, f"[{t.name}](#{t.test_case_id})"])
                 for m in all_machines:
                     row.add_content(t.get_result(m))
                 suite_col = ""
@@ -140,7 +140,7 @@ class SuiteCollection:
         return self.name < other.name
 
     def __str__(self):
-        return "SuiteCollection " + self.name
+        return f"SuiteCollection {self.name}"
 
     @staticmethod
     def collections_from_jobs(job_list):
@@ -212,14 +212,13 @@ def lava_create_testreport(jobids, lava=None):
     for j in lava_jobs:
         if not j.ok:
             doc.add_text_block(
-                "NOTE: Lava job {} for device {} did not complete, results are not part of the table".format(
-                    j.jobid, j.machine.capitalize()
-                )
+                f"NOTE: Lava job {j.jobid} for device {j.machine.capitalize()} did not "
+                f"complete, results are not part of the table"
             )
 
     doc.add_header("Test details")
     for c in suite_collections:
-        doc.add_header("Test suite {}\n".format(c.name), level=2)
+        doc.add_header(f"Test suite {c.name}\n", level=2)
         for t in c.test_collections:
             doc.add_anchor(t.test_case_id)
             collapse = doc.add_collapse_block(t.name)
diff --git a/scripts/lava_credentials.py b/scripts/lava_credentials.py
index ea598e3e..4663aa7f 100755
--- a/scripts/lava_credentials.py
+++ b/scripts/lava_credentials.py
@@ -37,8 +37,8 @@ def get_lava_credentials(host=None):
             lava_user = config.get(host, "LAVA_USER")
         except configparser.Error:
             print(
-                "Missing user name for '%s'. Please either set it via the LAVA_USER"
-                "environment variable or enter it now." % host
+                f"Missing user name for '{host}'. Please either set it via the "
+                f"LAVA_USER environment variable or enter it now."
             )
             lava_user = input("LAVA_USER: ")
             new_data = True
@@ -49,8 +49,8 @@ def get_lava_credentials(host=None):
             lava_token = config.get(host, "LAVA_TOKEN")
         except configparser.Error:
             print(
-                "Missing authentication token for '%s@%s'. Please either set it via the"
-                "LAVA_TOKEN environment variable or enter it now." % (lava_user, host)
+                f"Missing authentication token for '{lava_user}@{host}'. Please either "
+                f"set it via the LAVA_TOKEN environment variable or enter it now."
             )
             lava_token = getpass.getpass("LAVA_TOKEN: ")
             new_data = True
@@ -61,7 +61,7 @@ def get_lava_credentials(host=None):
         choice = None
         while choice not in valid:
             sys.stdout.write(
-                "Do you want to save these credentials in '%s'? " "[Y/n] " % configfile
+                f"Do you want to save these credentials in '{configfile}'? [Y/n] "
             )
             choice = input().lower()
         if valid[choice]:
diff --git a/scripts/markdown_generator.py b/scripts/markdown_generator.py
index 4a9b8f94..1c71a1b7 100755
--- a/scripts/markdown_generator.py
+++ b/scripts/markdown_generator.py
@@ -25,13 +25,13 @@ class MarkDownText:
             return [""]
         for i in range(0, len(out)):
             if self.bold:
-                out[i] = "**{}**".format(out[i])
+                out[i] = f"**{out[i]}**"
             if self.italic:
-                out[i] = "_{}_".format(out[i])
+                out[i] = f"_{out[i]}_"
         return out
 
     def __str__(self):
-        return __class__ + " " + self.text
+        return f"{__class__} {self.text}"
 
 
 class MarkDownTableCell(MarkDownText):
@@ -41,7 +41,7 @@ class MarkDownTableCell(MarkDownText):
         super().__init__(content, bold, italic)
 
     def __str__(self):
-        return "MarkDownTableCell (" + self.row.row + ":" + self.col + ") " + self.text
+        return f"MarkDownTableCell ({self.row.row}:{self.col}) {self.text}"
 
     @property
     def alignment(self):
@@ -63,7 +63,7 @@ class MarkDownTableCell(MarkDownText):
             c = c.rjust(width)
         else:  # default 'l'
             c = c.ljust(width)
-        c = " " + c + " "
+        c = f" {c} "
         return [c]
 
 
@@ -97,13 +97,13 @@ class MarkDownTableRow:
             self._current_col += 1
 
     def __str__(self):
-        c = "".join("| " + i.content + " |" for i in self.cols)
-        return "MarkDownTableRow (" + self.length + " cols) " + c
+        c = "".join(f"| {i.content} |" for i in self.cols)
+        return f"MarkDownTableRow ({self.length} cols) {c}"
 
     def render(self):
         output = "|"
         for c in self.cols:
-            output += c.render()[0] + "|"
+            output += f"{c.render()[0]}|"
         return [output]
 
 
@@ -135,7 +135,7 @@ class MarkDownTable:
         return self.get_row(0)
 
     def __str__(self):
-        return "MarkDownTable (" + self.length + " rows)"
+        return f"MarkDownTable ({self.length} rows)"
 
     def get_alignment(self, col):
         return self.alignment[col] if col < len(self.alignment) else self.alignment[-1]
@@ -228,7 +228,7 @@ class MarkDownAnchor:
         self.name = name
 
     def render(self):
-        out = '<a name="{}"></a>'.format(self.name)
+        out = f'<a name="{self.name}"></a>'
         return [out]
 
 
@@ -293,7 +293,7 @@ class MarkDownCollapseBlock(MarkDownDoc):
         out = []
         out += ["<details>"]
         if self.title is not None:
-            out += ["<summary>{}</summary>".format(self.title)]
+            out += [f"<summary>{self.title}</summary>"]
         out += super().render()
         out += ["</details>"]
         return out
diff --git a/scripts/merge_into_manifest.py b/scripts/merge_into_manifest.py
index 0e9fee45..24b329de 100755
--- a/scripts/merge_into_manifest.py
+++ b/scripts/merge_into_manifest.py
@@ -36,20 +36,17 @@ def merge_into_manifest(
         commit=commit,
     )
     if not mrs:
-        sys.exit(
-            "ERROR: could not determine source merge request for commit %s" % commit
-        )
+        sys.exit(f"ERROR: could not determine source merge request for commit {commit}")
     source_mr = mrs[0]
 
     # Get original branch for commit
     original_branch = source_mr.source_branch
-    logging.debug("Original branch %s", original_branch)
+    logging.debug(f"Original branch {original_branch}")
 
     if common.is_gitlab_ci_integration_branch(original_branch):
         print(
-            "Commit {} is on a gitlab-ci integration branch and will be merged by the gitlab-ci pipeline.".format(
-                commit
-            )
+            f"Commit {commit} is on a gitlab-ci integration branch and will be merged "
+            f"by the gitlab-ci pipeline."
         )
         return ""
 
@@ -65,7 +62,7 @@ def merge_into_manifest(
         manifest_project, integration_branch, target_branch
     )
     if not created:
-        sys.exit("ERROR: There is already an open merge request:\n%s" % mr.web_url)
+        sys.exit(f"ERROR: There is already an open merge request:\n{mr.web_url}")
 
     print("Created new merge request:")
     print(mr.web_url)
diff --git a/scripts/mirror_mr_pipeline.py b/scripts/mirror_mr_pipeline.py
index 3b1758a5..dcd8fac3 100755
--- a/scripts/mirror_mr_pipeline.py
+++ b/scripts/mirror_mr_pipeline.py
@@ -35,8 +35,7 @@ def get_pipelines(project: Project, commit: str, ref: str):
 
     if not pipelines:
         raise LookupError(
-            "No pipeline for commit '%s'%s found"
-            % (commit, " on ref '%s'" % ref if ref else "")
+            f"""No pipeline for commit '{commit}'{f" on ref '{ref}'" if ref else ""} found"""
         )
 
     return pipelines
@@ -83,11 +82,11 @@ def main():
 
     # Get MR pipeline
     try:
-        pipelines = get_pipelines(project, args.commit, "^" + args.ref)
+        pipelines = get_pipelines(project, args.commit, f"^{args.ref}")
         pipeline = pipelines[0]
 
         # Wait for pipeline termination
-        print("Pipeline: %s" % pipeline.web_url)
+        print(f"Pipeline: {pipeline.web_url}")
         if pipeline.status not in TERMINATED_STATES:
             print("Waiting for completion", end="")
             while pipeline.status not in TERMINATED_STATES:
@@ -98,7 +97,7 @@ def main():
                 )
             print("")
 
-        print("Result: %s" % pipeline.status, flush=True)
+        print(f"Result: {pipeline.status}", flush=True)
 
         # Mirror result in success/failure case
         if pipeline.status == "success":
@@ -113,9 +112,9 @@ def main():
     try:
         pipeline = project.pipelines.create({"ref": args.ref})
     except GitlabCreateError as e:
-        sys.exit("ERROR: %s" % e)
+        sys.exit(f"ERROR: {e}")
 
-    print("Created new pipeline for %s:" % args.ref)
+    print(f"Created new pipeline for {args.ref}:")
     print(pipeline.web_url)
 
     sys.exit(0)
diff --git a/scripts/package_release.py b/scripts/package_release.py
index 076a3389..48aad667 100755
--- a/scripts/package_release.py
+++ b/scripts/package_release.py
@@ -35,7 +35,7 @@ def generate_md5sums_file(input_files: list[str], output_file: str):
         os.makedirs(output_dir, exist_ok=True)
     with open(output_file, "w", encoding="utf-8") as f_md5:
         for f, h in md5sums.items():
-            f_md5.write("{}  {}\n".format(h, f))
+            f_md5.write(f"{h}  {f}\n")
 
 
 def copy_files(files: list[str], target_dir: str):
@@ -49,10 +49,10 @@ def copy_files(files: list[str], target_dir: str):
     for source_file in files:
         if os.path.exists(source_file):
             target_file = os.path.join(target_dir, os.path.basename(source_file))
-            print("Copy: %s -> %s" % (source_file, target_file))
+            print(f"Copy: {source_file} -> {target_file}")
             shutil.copyfile(source_file, target_file, follow_symlinks=True)
         else:
-            print("Missing: " + source_file)
+            print(f"Missing: {source_file}")
 
 
 def main():
@@ -102,11 +102,11 @@ def main():
     if not testdata_files:
         if args.images_dir is not None:
             print(args.images_dir)
-            for f in glob.glob(args.images_dir + "/*"):
+            for f in glob.glob(f"{args.images_dir}/*"):
                 print("-- ", f)
         if args.sdk_dir is not None:
             print(args.sdk_dir)
-            for f in glob.glob(args.sdk_dir + "/*"):
+            for f in glob.glob(f"{args.sdk_dir}/*"):
                 print("-- ", f)
         sys.exit("ERROR: no *.testdata.json file found in image or sdk dir.")
 
@@ -126,7 +126,7 @@ def main():
     if version.startswith("fngsystem"):
         release_name = version.replace("fngsystem", "FNGSystem")
     else:
-        release_name = "Yocto-%s" % version
+        release_name = f"Yocto-{version}"
 
     # Append release suffix
     if args.release_suffix is not None:
@@ -145,7 +145,7 @@ def main():
         doc_files = glob.glob(os.path.join(args.doc_dir, "*.md"))
         html_files = []
         for f in doc_files:
-            fout = os.path.splitext(f)[0] + ".html"
+            fout = f"{os.path.splitext(f)[0]}.html"
             convertmd2html(f, fout)
             html_files.append(fout)
 
@@ -164,8 +164,8 @@ def main():
     if args.images_dir is not None:
         # Add some additional files to the artifacts
         for artifact in image_artifacts:
-            artifacts.append(artifact.split(".")[0] + ".manifest")
-            artifacts.append(artifact.split(".")[0] + ".testdata.json")
+            artifacts.append(f"{artifact.split('.')[0]}.manifest")
+            artifacts.append(f"{artifact.split('.')[0]}.testdata.json")
 
         # Prepend path to artifacts
         artifacts = [os.path.join(args.images_dir, artifact) for artifact in artifacts]
@@ -203,7 +203,7 @@ def main():
 
     # Package SDK
     if args.sdk_dir is not None:
-        sdkfiles = glob.glob(os.path.join(args.sdk_dir, sdkname + "*"))
+        sdkfiles = glob.glob(os.path.join(args.sdk_dir, f"{sdkname}*"))
 
         # Generate MD5 sums file
         sdk_md5sums_file = os.path.join(machine, "sdk", "md5sums.txt")
@@ -216,9 +216,9 @@ def main():
 
     # Store pathes and other stuff in environment variable file
     with open("package.env", "w", encoding="utf-8") as env_file:
-        env_file.write("RELEASE_NAME={}\n".format(release_name))
-        env_file.write("VERSION={}\n".format(version))
-        env_file.write("MACHINE={}\n".format(machine))
+        env_file.write(f"RELEASE_NAME={release_name}\n")
+        env_file.write(f"VERSION={version}\n")
+        env_file.write(f"MACHINE={machine}\n")
 
 
 if __name__ == "__main__":
diff --git a/scripts/pylintrc b/scripts/pylintrc
index 6bc07365..104b3d95 100644
--- a/scripts/pylintrc
+++ b/scripts/pylintrc
@@ -2,9 +2,14 @@
 
 # Disable some verifications
 disable =
-	C,			# conventions
-	R,			# refactoring
-	W0511,		# fixme warnings
+    # conventions
+    C,
+    # refactoring
+    R,
+    # fixme warnings
+    W0511,
+    # logging-fstring-interpolation (consistent string formatting outweighs slightly worse performance)
+    W1203,
 
 # Whitelist lxml package to disable I1101
 extension-pkg-whitelist = lxml
diff --git a/scripts/render_jinja2_template.py b/scripts/render_jinja2_template.py
index 14b692d4..0434ff4d 100755
--- a/scripts/render_jinja2_template.py
+++ b/scripts/render_jinja2_template.py
@@ -62,7 +62,7 @@ def main():
     args, unknown = parser.parse_known_args()
     if args.verbose:
         logging.basicConfig(level=logging.DEBUG)
-    logging.debug("Using template: %s", args.template)
+    logging.debug(f"Using template: {args.template}")
 
     # parse optional arguments
     parser = argparse.ArgumentParser()
@@ -70,7 +70,7 @@ def main():
         if arg.startswith(("-", "--")):
             parser.add_argument(arg.split("=")[0])
     optional_args, _ = parser.parse_known_args()
-    logging.debug("Optional args: %s", optional_args)
+    logging.debug(f"Optional args: {optional_args}")
 
     # update context with given optional arguments
     j2_env = Environment(undefined=StrictUndefined, extensions=["jinja2.ext.do"])
@@ -81,7 +81,7 @@ def main():
         context["optional_arguments"] = args.arguments
 
     for k, v in context.items():
-        logging.debug("Optional args: %s -- %s", k, v)
+        logging.debug(f"Optional args: {k} -- {v}")
 
     j2_env.filters["basename"] = os.path.basename
     j2_env.filters["splitext"] = os.path.splitext
diff --git a/scripts/retrigger_integrating_projects.py b/scripts/retrigger_integrating_projects.py
index ea84f424..586e9b5c 100755
--- a/scripts/retrigger_integrating_projects.py
+++ b/scripts/retrigger_integrating_projects.py
@@ -79,7 +79,7 @@ def main():
         for mr in mrs:
             # Get pipeline
             if not mr.pipeline:
-                print("No pipeline in %s" % mr.web_url)
+                print(f"No pipeline in {mr.web_url}")
                 continue
             pipeline = project.pipelines.get(
                 mr.pipeline.get("id"),
@@ -95,13 +95,12 @@ def main():
                 )
                 if not jobs:
                     print(
-                        "Could not find any jobs named '%s' with states %s in %s"
-                        % (args.job, states, pipeline.web_url)
+                        f"Could not find any jobs named '{args.job}' with states "
+                        f"{states} in {pipeline.web_url}"
                     )
             except GitlabJobRetryError as e:
                 print(
-                    "ERROR: Could not retrigger job '%s' in %s: %s"
-                    % (args.job, pipeline.web_url, e)
+                    f"ERROR: Could not retrigger job '{args.job}' in {pipeline.web_url}: {e}"
                 )
                 failed = failed + 1
                 continue
diff --git a/scripts/retrigger_pipeline_jobs.py b/scripts/retrigger_pipeline_jobs.py
index ba6a03ab..7831a0e2 100755
--- a/scripts/retrigger_pipeline_jobs.py
+++ b/scripts/retrigger_pipeline_jobs.py
@@ -52,7 +52,7 @@ def retrigger_pipeline_jobs(
 
     # Retrigger job
     job.retry()
-    print("Retrigger job '%s' in %s:" % (job_name, pipeline.web_url))
+    print(f"Retrigger job '{job_name}' in {pipeline.web_url}:")
     job = project.jobs.get(job.id, retry_transient_errors=True)
     print(job.web_url)
     jobs.append(job)
@@ -108,9 +108,9 @@ def main():
     try:
         pipeline = project.pipelines.get(args.pipeline, retry_transient_errors=True)
     except GitlabGetError as e:
-        sys.exit("ERROR: could not get pipeline %s: %s" % (args.pipeline, e))
+        sys.exit(f"ERROR: could not get pipeline {args.pipeline}: {e}")
     if not pipeline:
-        sys.exit("ERROR: could not find pipeline %s" % args.pipeline)
+        sys.exit(f"ERROR: could not find pipeline {args.pipeline}")
 
     jobs = retrigger_pipeline_jobs(
         project,
@@ -120,7 +120,7 @@ def main():
         args.include_children,
     )
 
-    print("Retriggered %d jobs for pipeline #%s" % (len(jobs), args.pipeline))
+    print(f"Retriggered {len(jobs)} jobs for pipeline #{args.pipeline}")
 
 
 if __name__ == "__main__":
diff --git a/scripts/submit_test.py b/scripts/submit_test.py
index f5ea1fc9..f74e0a87 100755
--- a/scripts/submit_test.py
+++ b/scripts/submit_test.py
@@ -16,7 +16,7 @@ TESTS_GIT_URL = "git@gitlab.com:garz-fricke/tests/development-tests.git"
 
 
 def call(cmd, stdout=None):
-    logging.debug("Call: %s", cmd)
+    logging.debug(f"Call: {cmd}")
     try:
         if stdout is None:
             result = subprocess.run(cmd, capture_output=True, check=True)
@@ -25,11 +25,11 @@ def call(cmd, stdout=None):
     except subprocess.CalledProcessError as e:
         out = e.stdout.decode() if e.stdout is not None else ""
         err = e.stderr.decode() if e.stderr is not None else ""
-        logging.error("Command failed %s: %s %s", cmd, out, err)
+        logging.error(f"Command failed {cmd}: {out} {err}")
         exit(1)
     if result is not None and result.stdout is not None:
         res = result.stdout.decode().strip()
-        logging.debug("Command returned: %s", res)
+        logging.debug(f"Command returned: {res}")
     else:
         res = ""
     return res
@@ -127,7 +127,7 @@ def main():
         if os.path.exists(checkout_path):
             shutil.rmtree(checkout_path)
 
-        logging.debug("Cloning %s", args.testrepo)
+        logging.debug(f"Cloning {args.testrepo}")
         try:
             if args.testrepo_branch is not None:
                 testrepo = git.Repo.clone_from(
@@ -142,7 +142,7 @@ def main():
         checkout_path = args.checkout_path
         testrepo = git.Repo(checkout_path)
 
-        logging.debug("Using test repo at %s", checkout_path)
+        logging.debug(f"Using test repo at {checkout_path}")
 
     testrepo_revision = testrepo.head.commit.hexsha
 
@@ -157,14 +157,14 @@ def main():
     cmd_submitall = os.path.join(checkout_path, "scripts", "submit_all.py")
     cmd_generate = os.path.join(checkout_path, "scripts", "generate_lava_job.py")
 
-    logging.debug("Test suite %s", args.test_suite)
+    logging.debug(f"Test suite {args.test_suite}")
 
     for platform in args.platforms:
 
         test_suite = os.path.join(
             checkout_path, "tests", args.test_suite.format(platform=platform)
         )
-        logging.debug("Test suite %s", test_suite)
+        logging.debug(f"Test suite {test_suite}")
 
         if os.path.splitext(test_suite)[1] == ".jinja2":
             cmd = [
@@ -180,8 +180,8 @@ def main():
                 "--name",
                 args.name.format(platform=platform),
             ]
-            logging.debug("Generate job: %s", cmd)
-            jobfile = os.path.join(args.results_path, "{}.yaml".format(platform))
+            logging.debug(f"Generate job: {cmd}")
+            jobfile = os.path.join(args.results_path, f"{platform}.yaml")
             with open(jobfile, "w", encoding="utf-8") as jobfile_handle:
                 call(cmd, stdout=jobfile_handle)
         else:
@@ -193,17 +193,17 @@ def main():
             cmd = [cmd_submit, jobfile]
 
         if args.dry:
-            print("Skipping submit because of dry run: %s", cmd)
+            print(f"Skipping submit because of dry run: {cmd}")
             continue
         result = call(cmd)
 
         for line in result.splitlines():
             url = line.strip().replace("\n", "")
-            print("Started testjob {}: {}".format(platform, url))
+            print(f"Started testjob {platform}: {url}")
             jobid = url.split("/")[-1]
             jobs[jobid] = [platform, ""]
 
-    logging.debug("Queued jobs: %s", jobs)
+    logging.debug(f"Queued jobs: {jobs}")
     if not jobs:
         logging.info("No jobs queued.")
         return 0
@@ -229,14 +229,14 @@ def main():
 
         # Get results as yaml
         resultfile = os.path.join(
-            args.results_path, "results-{}-{}.yaml".format(jobid, jobplatform)
+            args.results_path, f"results-{jobid}-{jobplatform}.yaml"
         )
         with open(resultfile, "w", encoding="utf-8") as resultfile_handle:
             call([cmd_query, "--get-results", jobid], stdout=resultfile_handle)
 
         # Get results as junit xml
         resultfile = os.path.join(
-            args.results_path, "results-{}-{}.xml".format(jobid, jobplatform)
+            args.results_path, f"results-{jobid}-{jobplatform}.xml"
         )
         with open(resultfile, "w", encoding="utf-8") as resultfile_handle:
             call(
@@ -256,11 +256,7 @@ def main():
 
         if jobresult != "Complete" or joberrors > 0:
             passed = False
-        print(
-            "Test result for {}: {} Errors, {}".format(
-                jobplatform, joberrors, jobresult
-            )
-        )
+        print(f"Test result for {jobplatform}: {joberrors} Errors, {jobresult}")
 
     # Create report as MarkDown
     if args.report_name is not None:
diff --git a/scripts/update_submodule.py b/scripts/update_submodule.py
index ff8ee342..29fbc984 100755
--- a/scripts/update_submodule.py
+++ b/scripts/update_submodule.py
@@ -19,24 +19,24 @@ def get_submodule_project_path_and_revision(project: Project, submodule, branch=
 
     gitmodules = common.get_repository_file_raw(project, ".gitmodules", ref=branch)
     if gitmodules is None:
-        logging.error("Submodule %s not found in %s.", submodule, project.name)
+        logging.error(f"Submodule {submodule} not found in {project.name}.")
         return None, None
 
-    # logging.debug("Gitmodules: %s", gitmodules)
+    # logging.debug(f"Gitmodules: {gitmodules}")
 
     cfgparse = ConfigParser()
     cfgparse.read_string(gitmodules)
     try:
-        section = cfgparse['submodule "{}"'.format(submodule)]
+        section = cfgparse[f'submodule "{submodule}"']
     except KeyError:
-        logging.error("Submodule %s not found in %s.", submodule, project.name)
+        logging.error(f"Submodule {submodule} not found in {project.name}.")
         return None, None
 
     submodule_url = section["url"]
     # absolut path to a relative submodule
     # Check for relative path
     if not submodule_url.startswith(".."):
-        logging.error("absolute submodule paths are not supported (%s)", submodule_url)
+        logging.error(f"absolute submodule paths are not supported ({submodule_url})")
         return None, None
 
     # Get absolute project path
@@ -89,18 +89,16 @@ def clone_project_and_submodule(project: Project, submodule_name, branch=None):
     try:
         repo = Repo.clone_from(clone_url.url, project_dir.name, branch=branch, depth=1)
     except GitCommandError as e:
-        sys.exit("ERROR: could not clone repository\n" + str(e))
+        sys.exit(f"ERROR: could not clone repository\n{e}")
     except IndexError:
-        sys.exit("ERROR: branch '%s' not found" % branch)
+        sys.exit(f"ERROR: branch '{branch}' not found")
 
     # Find submodule
     submodule = common.get_submodule(repo, submodule_name)
 
     # Check for relative path
     if not submodule.url.startswith(".."):
-        sys.exit(
-            "ERROR: absolute submodule paths are not supported (%s)" % submodule.url
-        )
+        sys.exit(f"ERROR: absolute submodule paths are not supported ({submodule.url})")
 
     # Get absolute project path
     # This cannot be done with gitpython directly due to issue:
@@ -127,7 +125,7 @@ def clone_project_and_submodule(project: Project, submodule_name, branch=None):
         submodule.update(init=True)
     except GitCommandError:
         # This seems to happen when a not existing commit is referenced
-        logging.error("Failed to initialize submodule %s", submodule_name)
+        logging.error(f"Failed to initialize submodule {submodule_name}")
 
     with submodule.config_writer() as writer:
         writer.set("url", submodule_relative_url)
@@ -146,7 +144,7 @@ def update_submodule_in_repo(repo: Repo, submodule_project: Project, new_revisio
     try:
         submodule_project.module().git.checkout(new_revision)
     except GitCommandError as e:
-        sys.exit("ERROR: could not checkout commit\n" + str(e))
+        sys.exit(f"ERROR: could not checkout commit\n{e}")
     repo.git.add(submodule_project.path)
 
 
@@ -158,7 +156,7 @@ def update_gitlab_ci_include(content, include_project, new_revision):
 
     yaml = YAML()
     data = yaml.load(content)
-    logging.debug("Yaml: %s", data)
+    logging.debug(f"Yaml: {data}")
     try:
         includes = data["include"]
     except KeyError:
@@ -174,13 +172,13 @@ def update_gitlab_ci_include(content, include_project, new_revision):
             logging.debug("Failed to parse include statement")
             return None
     if current_revision is None:
-        logging.debug("Failed to find %s in include statement", include_project)
+        logging.debug(f"Failed to find {include_project} in include statement")
         return None
 
     # Use plain replacement to keep the content of the file
     # Yes, this may fail if the 'current_revision' string happens to occur
     # multiple times in this file. But this will probably never happen
-    logging.debug("Replace %s with %s", current_revision, new_revision)
+    logging.debug(f"Replace {current_revision} with {new_revision}")
     return content.replace(current_revision, new_revision)
 
 
@@ -218,7 +216,7 @@ def update_submodule_and_include_ref(
 
     if branch is None:
         branch = project.default_branch
-    logging.debug("Branch: %s", branch)
+    logging.debug(f"Branch: {branch}")
 
     (
         submodule_project_path,
@@ -229,17 +227,17 @@ def update_submodule_and_include_ref(
     submodule_project = common.get_project(gitlab, submodule_project_path)
 
     # Get commits between current and new revision
-    revision_range = submodule_current_rev + ".." + new_revision
+    revision_range = f"{submodule_current_rev}..{new_revision}"
     commits = submodule_project.commits.list(
         ref_name=revision_range, retry_transient_errors=True
     )
     if not commits:
         logging.info(
-            "No commits found in range %s, probably submodule already up-to-date.",
-            revision_range,
+            f"No commits found in range {revision_range}, probably submodule already "
+            f"up-to-date."
         )
         return None, None, None, None, None
-    logging.debug("New commits: %s", commits)
+    logging.debug(f"New commits: {commits}")
 
     # Find out if top commit is top commit of a merge request
     # If so, use source branch of this MR as integration branch name
@@ -252,7 +250,7 @@ def update_submodule_and_include_ref(
         ):
             integration_branch_suffix = mr["source_branch"]
             break
-    logging.debug("Integration branch suffix: %s", integration_branch_suffix)
+    logging.debug(f"Integration branch suffix: {integration_branch_suffix}")
 
     # Construct integration branch name
     integration_branch_name = common.integration_branch_name(
@@ -269,7 +267,7 @@ def update_submodule_and_include_ref(
 
     # Check if revisions are different
     if submodule_current_rev == new_revision:
-        print("Submodule is already at %s" % new_revision)
+        print(f"Submodule is already at {new_revision}")
         submodule_update_needed = False
 
     # Check if we already have an integration branch (before we actually do the checkout)
@@ -298,7 +296,7 @@ def update_submodule_and_include_ref(
             # base or if it is outdated
             integration_base_branch = project.branches.get(branch)
             integration_base_id = integration_base_branch.commit["id"]
-            logging.debug("Head of %s points to %s", branch, integration_base_id)
+            logging.debug(f"Head of {branch} points to {integration_base_id}")
 
             # Loop over the commits until the integration_branch head id is found
             if common.is_commit_parent_of_project_commit(
@@ -312,15 +310,15 @@ def update_submodule_and_include_ref(
                     project, submodule_name, integration_branch_name
                 )
                 logging.debug(
-                    "Revision in integration branch '%s', new_revision '%s'",
-                    integration_branch_submodule_rev,
-                    new_revision,
+                    f"Revision in integration branch "
+                    f"'{integration_branch_submodule_rev}', "
+                    f"new_revision '{new_revision}'"
                 )
 
                 if integration_branch_submodule_rev == new_revision:
                     print(
-                        "Submodule is already at %s on branch %s"
-                        % (new_revision, integration_branch_name)
+                        f"Submodule is already at {new_revision} on branch "
+                        f"{integration_branch_name}"
                     )
                     integration_commit = existing_branch.commit["id"]
                     submodule_update_needed = False
@@ -341,10 +339,10 @@ def update_submodule_and_include_ref(
         )
 
         if existing_branch:
-            print("Using existing integration branch %s" % integration_branch_name)
+            print(f"Using existing integration branch {integration_branch_name}")
         else:
             # Create branch
-            print("Creating integration branch %s" % integration_branch_name)
+            print(f"Creating integration branch {integration_branch_name}")
             project_repo.head.set_reference(
                 project_repo.create_head(integration_branch_name)
             )
-- 
GitLab