From c292d1447f500262d733df620d291333ceed4a7b Mon Sep 17 00:00:00 2001
From: Tim Jaacks <tim.jaacks@seco.com>
Date: Thu, 19 Oct 2023 09:57:27 +0200
Subject: [PATCH] Confluence: combine data from parallel child pipelines

We deploy the Yocto image and the SDK in separate pipelines, both of
which have Confluence jobs with the same Confluence target. Previously
these pipelines were independent, so running the Confluence job in one
of them would overwrite the Confluence page if the job had already run
in the other pipeline. Hence the page could contain either the image
files or the SDK files, but never both.

Combine the data now by checking for other parallel child pipelines
with the same Confluence configuration, so that running the Confluence
jobs in the Yocto pipeline also includes the information from the SDK
build and vice versa.
---
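Notes: merge_variables(), added below, concatenates values from parallel
pipelines unless one value already contains the other. A minimal sketch of
the intended semantics, with made-up variable contents:

    merge_variables({"MACHINE": "imx8mp"}, {"MACHINE": "imx8mp"})
    # -> {"MACHINE": "imx8mp"}          (contained values are not duplicated)

    merge_variables({"FILES": "image.tar"}, {"FILES": "sdk.sh"})
    # -> {"FILES": "image.tar sdk.sh"}  (distinct values are concatenated)
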
 build-pipeline.yml                     |   2 +
 manifest-pipeline-ci-test.yml          |   2 +
 manifest-pipeline-yocto.yml            |   4 +-
 scripts/collect_release_information.py | 160 ++++++++++++++++++-------
 scripts/get_parent_pipeline.py         |  78 ++++++++++++
 5 files changed, 204 insertions(+), 42 deletions(-)
 create mode 100755 scripts/get_parent_pipeline.py

diff --git a/build-pipeline.yml b/build-pipeline.yml
index 451a187c..010fff1e 100644
--- a/build-pipeline.yml
+++ b/build-pipeline.yml
@@ -126,7 +126,9 @@ workflow:
     - echo "TOOLCHAIN_OUTPUTNAME=${TOOLCHAIN_OUTPUTNAME}" >> build.env
     - echo "DISTRO_RELEASE_ARTEFACTS='${DISTRO_RELEASE_ARTEFACTS}'" >> build.env
     # Install script location is needed in test job
+    - if [[ ! -z "${INSTALL_SCRIPT}" ]]; then
     - echo "FNG_INSTALL_URL=${FNG_INSTALL_URL}" >> build.env
+    - fi
     # Machine and supported hardware are needed for Confluence release page
     - echo "MACHINE=${MACHINE}" >> build.env
     - echo "SUPPORTED_HARDWARE='${SUPPORTED_HARDWARE}'" >> build.env
diff --git a/manifest-pipeline-ci-test.yml b/manifest-pipeline-ci-test.yml
index 14ce3e7e..5d2f6833 100644
--- a/manifest-pipeline-ci-test.yml
+++ b/manifest-pipeline-ci-test.yml
@@ -99,3 +99,5 @@ sdk-simulation-pipeline:
     ARTIFACTS_PATH: build-*/tmp/deploy/sdk/*
     MANUAL_BUILD: "true"
     PACKAGE_TYPE: sdk
+    CONFLUENCE_SPACE: RnD
+    CONFLUENCE_PARENT_ID: 1615560743
diff --git a/manifest-pipeline-yocto.yml b/manifest-pipeline-yocto.yml
index 6424e5a2..9d5c669a 100644
--- a/manifest-pipeline-yocto.yml
+++ b/manifest-pipeline-yocto.yml
@@ -38,6 +38,8 @@ variables:
       http://support.garz-fricke.com/projects/Linux-Yocto/Releases/
   AZURE_TARGET_FOLDER: HMI/Linux-Yocto/Releases
   AZURE_CONTAINER_NAME: seco-ne-public
+  CONFLUENCE_SPACE: SECONorthTech
+  CONFLUENCE_PARENT_ID: 1479573518

 .fngsystem-deploy:
   variables:
@@ -64,8 +66,6 @@ yocto-pipeline:
     DOCUMENTATION_FILES: "*.md"
     PACKAGE_TYPE: image
     TEST_STAGE: "true"
-    CONFLUENCE_SPACE: SECONorthTech
-    CONFLUENCE_PARENT_ID: 1479573518
     ALPHAPLAN_STAGE: "true"

 sdk-pipeline:
diff --git a/scripts/collect_release_information.py b/scripts/collect_release_information.py
index bfbe5837..6e44a68d 100755
--- a/scripts/collect_release_information.py
+++ b/scripts/collect_release_information.py
@@ -6,10 +6,15 @@ import tempfile

 from gitlab import GitlabGetError
 from gitlab.client import Gitlab
-from gitlab.v4.objects import Project, ProjectPipelineJob
+from gitlab.v4.objects import (
+    Project,
+    ProjectPipelineJob,
+    ProjectPipelineVariableManager,
+)

 import common
 from download_job_artifacts import download_job_artifact
+from get_parent_pipeline import get_parent_pipeline
 from get_pipeline_jobs import get_pipeline_jobs


@@ -65,6 +70,47 @@ def add_suffix_to_dict_keys(dictionary: dict, suffix: str):
     return {f"{key}{suffix}": dictionary[key] for key in dictionary}


+def get_pipeline_variable_value(variables: ProjectPipelineVariableManager, key: str):
+    """Look up a pipeline variable by its key and return its value
+
+    Args:
+        variables: pipeline variable manager to search
+        key: key of the variable to look up
+
+    Returns:
+        Value of the variable if found, None otherwise
+    """
+    try:
+        variable = next(
+            variable for variable in variables.list() if variable.key == key
+        )
+    except StopIteration:
+        return None
+
+    return variable.value
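+
+
+# Illustrative use (hypothetical snippet; main() below performs the same
+# lookups for the Confluence settings):
+#
+#   space = get_pipeline_variable_value(pipeline.variables, "CONFLUENCE_SPACE")
+#   if space is None:
+#       ...  # the pipeline has no Confluence configuration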
+
+
+def merge_variables(first: dict[str, str], second: dict[str, str]) -> dict[str, str]:
+    """Merge two dictionaries of variables. If a variable exists in both
+    dictionaries and neither value is contained in the other, concatenate
+    the values with a space in between.
+
+    Args:
+        first: a dictionary containing variables
+        second: a dictionary containing variables
+
+    Returns:
+        A dictionary containing the merged variables
+    """
+    merged = dict(first)  # copy, so that the input dictionary is not mutated
+    for key in second:
+        if key not in first or first[key] in second[key]:
+            merged[key] = second[key]
+        elif second[key] not in first[key]:
+            merged[key] = f"{first[key]} {second[key]}"
+    return merged
+
+
 def main():
     parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")

@@ -108,53 +154,87 @@ def main():
     gitlab = Gitlab(args.gitlab_url, private_token=args.token)
     project = common.get_project(gitlab, args.project)

-    variables = {}
+    pipeline = project.pipelines.get(args.pipeline, retry_transient_errors=True)

-    # Get all successful jobs from deploy stage
-    deploy_jobs = get_pipeline_jobs(
-        gitlab, project, args.pipeline, stage=args.deploy_stage
+    # Get Confluence configuration of given pipeline
+    confluence_parent_id = get_pipeline_variable_value(
+        pipeline.variables, "CONFLUENCE_PARENT_ID"
     )
-    successful_deploy_jobs = [job for job in deploy_jobs if job.status == "success"]
-    if not successful_deploy_jobs:
-        exit(f"ERROR: no successful jobs found in stage '{args.deploy_stage}'")
-
-    # Get all jobs from build stage
-    build_jobs = get_pipeline_jobs(gitlab, project, args.pipeline, stage="Build")
-
-    for deploy_job in successful_deploy_jobs:
-        # Get job suffix after first "-" of the job name
-        job_suffix = f"{deploy_job.name[deploy_job.name.find('-'):]}"
-
-        # Find build job with same suffix
-        build_job = None
-        for job in build_jobs:
-            if job.name.endswith(job_suffix):
-                build_job = job
-
-        # Get variables from both jobs
-        job_env_variables = get_job_env_variables(
-            gitlab, project, deploy_job, "deploy.env"
-        )
-        if build_job:
-            job_env_variables |= get_job_env_variables(
-                gitlab, project, build_job, "build.env"
-            )
+    confluence_space = get_pipeline_variable_value(
+        pipeline.variables, "CONFLUENCE_SPACE"
+    )
+
+    # Get all parallel child pipelines with the same Confluence configuration
+    pipelines = []
+    parent_pipeline = get_parent_pipeline(project, pipeline)
+    if parent_pipeline:
+        for bridge in parent_pipeline.bridges.list(all=True):
+            # Skip bridges whose downstream pipeline was never created
+            if not bridge.downstream_pipeline:
+                continue
+            child_pipeline = project.pipelines.get(
+                bridge.downstream_pipeline["id"], retry_transient_errors=True
+            )
+            if (
+                get_pipeline_variable_value(
+                    child_pipeline.variables, "CONFLUENCE_PARENT_ID"
+                )
+                == confluence_parent_id
+                and get_pipeline_variable_value(
+                    child_pipeline.variables, "CONFLUENCE_SPACE"
+                )
+                == confluence_space
+            ):
+                pipelines.append(child_pipeline)

-        # Append job suffix to the variables
-        variables |= add_suffix_to_dict_keys(
-            job_env_variables, job_suffix.replace("-", "_")
+    # Loop over all found pipelines and combine build and deploy variables from them
+    variables = {}
+    for pipeline in pipelines:
+        pipeline_variables = {}
+        print(f"Getting jobs from pipeline {pipeline.web_url}", file=sys.stderr)
+
+        # Get all successful jobs from deploy stage
+        deploy_jobs = get_pipeline_jobs(
+            gitlab, project, pipeline.id, stage=args.deploy_stage
         )
+        successful_deploy_jobs = [job for job in deploy_jobs if job.status == "success"]
+        if not successful_deploy_jobs:
+            print(
+                f"No successful jobs found in stage '{args.deploy_stage}'",
+                file=sys.stderr,
+            )

-    # Furthermore merge all machine-specific values into one space-separated
-    # variable.
-    for name in job_env_variables:
-        if job_env_variables[name]:
-            variables[name] = (
-                f"{variables[name]} {job_env_variables[name]}"
-                if name in variables and variables[name] != job_env_variables[name]
-                else job_env_variables[name]
-            )
+        # Get all jobs from build stage
+        build_jobs = get_pipeline_jobs(gitlab, project, pipeline.id, stage="Build")
+
+        for deploy_job in successful_deploy_jobs:
+            # Get job suffix after first "-" of the job name
+            job_suffix = f"{deploy_job.name[deploy_job.name.find('-'):]}"
+
+            # Find build job with same suffix
+            build_job = None
+            for job in build_jobs:
+                if job.name.endswith(job_suffix):
+                    build_job = job
+
+            # Get variables from both jobs
+            job_env_variables = get_job_env_variables(
+                gitlab, project, deploy_job, "deploy.env"
+            )
+            if build_job:
+                job_env_variables |= get_job_env_variables(
+                    gitlab, project, build_job, "build.env"
+                )
+
+            # Append job suffix to the variables
+            pipeline_variables |= add_suffix_to_dict_keys(
+                job_env_variables, job_suffix.replace("-", "_")
+            )
+
+            # Furthermore merge all machine-specific values into one space-separated
+            # variable.
+            pipeline_variables = merge_variables(pipeline_variables, job_env_variables)
+
+        variables = merge_variables(variables, pipeline_variables)
+
     # Print all variables in a shell compatible form
     for k, v in sorted(variables.items()):
         print(f'{k}="{v}"')
diff --git a/scripts/get_parent_pipeline.py b/scripts/get_parent_pipeline.py
new file mode 100755
index 00000000..8a7cbeb7
--- /dev/null
+++ b/scripts/get_parent_pipeline.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python3
+import argparse
+from typing import Union
+
+from gitlab.client import Gitlab
+from gitlab.v4.objects import Project, ProjectPipeline
+
+import common
+
+
+def get_parent_pipeline(
+    project: Project, pipeline: ProjectPipeline
+) -> Union[ProjectPipeline, None]:
+    """Get the parent pipeline within the same project
+
+    Args:
+        project: project the pipeline belongs to
+        pipeline: pipeline to get the parent of
+
+    Returns:
+        Parent pipeline if found, None otherwise
+    """
+
+    # Get all pipelines for the same ref
+    pipelines = project.pipelines.list(ref=pipeline.ref, get_all=True)
+
+    # Check for each pipeline whether the given one is among its children
+    for p in pipelines:
+        for bridge in p.bridges.list(get_all=True):
+            if (
+                bridge.downstream_pipeline
+                and bridge.downstream_pipeline["id"] == pipeline.id
+            ):
+                return p
+
+    # No parent found
+    return None
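+
+
+# Usage sketch (illustrative only, the pipeline ID is a placeholder):
+#
+#   child = project.pipelines.get(12345)
+#   parent = get_parent_pipeline(project, child)
+#   if parent is not None:
+#       print(parent.web_url)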
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        default=common.GITLAB_URL,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        help="""name of the GitLab project""",
+        dest="project",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        help="""ID of the pipeline to get the parent pipeline of""",
+        dest="pipeline",
+        required=True,
+    )
+
+    args, _ = parser.parse_known_args()
+
+    gitlab = Gitlab(args.gitlab_url, private_token=args.token)
+    project = gitlab.projects.get(args.project)
+    pipeline = project.pipelines.get(args.pipeline)
+
+    parent_pipeline = get_parent_pipeline(project, pipeline)
+
+    if not parent_pipeline:
+        exit("No parent pipeline found")
+
+    print(parent_pipeline.web_url)
+
+
+if __name__ == "__main__":
+    main()
--
GitLab
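
For reference, the new helper can also be run standalone (all values below
are placeholders):

    ./scripts/get_parent_pipeline.py --token <TOKEN> \
        --project <GROUP/PROJECT> --pipeline <PIPELINE_ID>

On success it prints the parent pipeline's web URL; otherwise it exits with
"No parent pipeline found".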