From ab09db0b5ed305ecae20a578bf84e5acba3eb93b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Jonas=20H=C3=B6ppner?= <jonas.hoeppner@garz-fricke.com>
Date: Tue, 12 Jul 2022 11:04:20 +0200
Subject: [PATCH] CI: Lava Test: Allow installing the test image directly from gitlab

The images from the build job can be installed directly from gitlab.
To achieve this, some changes across the whole pipeline were needed:

1. The variables used in the build job, like CI_PARAM_IMAGE, ..., and
   related variables like BUILDPATH, are now only valid in the build job.
2. The build job writes every variable needed by a follow-up job into
   build.env. This also includes the URL to the fng-install.sh of the
   final image.
3. The build.env file is used both as a dotenv artifact and as a normal
   file artifact. The dotenv report makes the written variables
   automatically available in follow-up jobs that use the artifacts,
   like the deploy job. The normal file artifact is available via
   artifact download. (I didn't find a way to download the dotenv file
   instead.)
4. Some scripts have been added:
   - Find a job inside the pipeline by name, as the id is not known in
     advance.
   - Download all artifacts, or a single file from the artifacts, of a
     given job.
   - Download one file from the latest job with a given name.
5. These scripts are used to download build.env into the test job
   (where no other artifacts are needed anymore).
6. build.env is sourced so that all variables are available inside the
   test script.

Additionally, this adds a fake build job to the ci-test pipeline that
copies an image from srv73 and stores it as an artifact, so that a test
job can run on it like in the normal yocto pipeline.
---
 build-common.yml                          |  36 ++---
 build-jobs-ci-test.jinja2                 |  64 +++++++--
 build-jobs-yocto.jinja2                   |  13 +-
 build-yocto.yml                           |  78 +++++------
 manifest-package.yml                      |  34 ++---
 scripts/download_file_from_latest_job.py  | 107 +++++++++++++++
 scripts/download_job_artifacts.py         | 164 +++++++++++++++++++++++
 scripts/get_pipeline_jobs.py              | 131 ++++++++++++++++++
 8 files changed, 532 insertions(+), 95 deletions(-)
 create mode 100755 scripts/download_file_from_latest_job.py
 create mode 100755 scripts/download_job_artifacts.py
 create mode 100755 scripts/get_pipeline_jobs.py

diff --git a/build-common.yml b/build-common.yml index 4c49833f..16839bbb 100644 --- a/build-common.yml +++ b/build-common.yml @@ -74,13 +74,13 @@ workflow: rules: - when: manual allow_failure: true + dependencies: [] variables: - LOGPREFIX: "CI:test:" + # TODO checkout only gitlab-ci repo to allow running on deleted branches # Include git submodules GIT_SUBMODULE_STRATEGY: recursive CI_PARAM_TEST_SUITE: '{platform}.jinja2' CI_PARAM_EXTRA: --nop - DEPLOYPATH_TEST: /artifacts/${CI_JOB_ID}/ GITLAB_SERVER: "${CI_SERVER_HOST}:${CI_SERVER_SSH_PORT}" GIT_BASE_URL: "ssh://git@${GITLAB_SERVER}/${CI_PROJECT_ROOT_NAMESPACE}" TESTS_GIT_URL: "${GIT_BASE_URL}/yocto/tests.git" @@ -94,18 +94,22 @@ before_script: - !reference [.setup_ssh] script: + - .gitlab-ci/scripts/download_file_from_latest_job.py + --project $CI_PROJECT_ID + --pipeline $CI_PIPELINE_ID + --token=$GITBOT_TOKEN + --name $CI_PARAM_BUILDJOB + --path build.env + - cat build.env + - source build.env # Submit tests to lava server - |- - RELEASE=${RELEASE:-$(ls ${DEPLOYPATH_TEST}/)} - INSTALLSCRIPT_ABS="$DEPLOYPATH_TEST/$RELEASE/$CI_PARAM_MACHINE/fng-install.sh" - FNG_INSTALL_URL="${ARTIFACTS_HOST_URL}/${INSTALLSCRIPT_ABS#/*/}" - .gitlab-ci/scripts/submit_test.py \ - --fng-install "${FNG_INSTALL_URL}" \ - --name \ - "Gitlab build test ${CI_PARAM_MACHINE} ${RELEASE} 
${CI_PIPELINE_ID}" \ - --results-path "results" \ - --test-repo ${TESTS_GIT_URL} \ - --test-repo-branch ${TEST_REPO_BRANCH} \ - --test-plan ${CI_PARAM_TEST_SUITE} \ - ${CI_PARAM_EXTRA} \ - ${CI_PARAM_PLATFORMS} + - .gitlab-ci/scripts/submit_test.py + --fng-install "$FNG_INSTALL_URL" + --name + "Gitlab $BUILD_MACHINE $BUILD_DISTRO $BUILD_IMAGE ($CI_PIPELINE_ID)" + --results-path "results" + --test-repo $TESTS_GIT_URL + --test-repo-branch $TEST_REPO_BRANCH + --test-plan $CI_PARAM_TEST_SUITE + $CI_PARAM_EXTRA + $CI_PARAM_PLATFORMS diff --git a/build-jobs-ci-test.jinja2 b/build-jobs-ci-test.jinja2 index cfad8a92..3132ff50 100644 --- a/build-jobs-ci-test.jinja2 +++ b/build-jobs-ci-test.jinja2 @@ -61,6 +61,53 @@ build:echo: - printenv - echo "Build successful" +build-imx6guf-fake: + stage: build + needs: [] + tags: + - infrastructure + timeout: 20m + image: ${CI_IMAGE_PYTHON} + rules: + - when: manual + allow_failure: true + variables: + MANIFEST_VERSION: Yocto-dunfell-17.0 + CI_PARAM_MACHINE: imx6guf + CI_PARAM_DISTRO: guf-wayland + CI_PARAM_IMAGE: guf-image + BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}" + IMAGEBASEPATH: "tmp/deploy/images/" + IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}" + LICENSESPATH: "tmp/deploy/licenses" + SDKPATH: "tmp/deploy/sdk/" + INSTALLSCRIPT: "fng-install.sh" + JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/" + FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}" + + script: + # TODO: this becomes similar to the yocto build and may be merged + - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env + - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env + - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env + - echo "MANIFEST_VERSION=$MANIFEST_VERSION" >> build.env + - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env + - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env + - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env + - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env + - source build.env + + - echo "Getting yocto build from Z:" + - mkdir -p $BUILD_PATH_IMAGE + - cp -v /artifacts-yocto/Releases/$MANIFEST_VERSION/$CI_PARAM_MACHINE/*.* $BUILD_PATH_IMAGE + - ls $BUILD_PATH_IMAGE + artifacts: + reports: + dotenv: build.env + paths: + - build-guf-wayland-imx6guf/tmp/deploy/images/imx6guf/* + - build.env + # -------------------------------------------------------------------------------------- # Stage: test # -------------------------------------------------------------------------------------- @@ -87,16 +134,15 @@ test:fail: variables: RETURNCODE: 1 -smoketest:imx6guf: - extends: .test +smoketest-imx6guf: + extends: + - .test stage: test + needs: [build-imx6guf-fake] variables: - CI_PARAM_MACHINE: imx6guf + TEST_REPO_BRANCH: dunfell CI_PARAM_PLATFORMS: imx6guf - CI_PARAM_TEST_SUITE: boot.jinja2 CI_PARAM_EXTRA: --all-devices - # Use existing hard-coded release as we don't build in CI-test - ARTIFACTS_HOST_URL: http://srv73 - DEPLOYPATH_TEST: SoftwareStore/Linux-Yocto/Releases/ - RELEASE: Yocto-dunfell-15.0 - TEST_REPO_BRANCH: dunfell + CI_PARAM_TEST_SUITE: boot.jinja2 + CI_PARAM_BUILDJOB: build-imx6guf-fake + diff --git a/build-jobs-yocto.jinja2 b/build-jobs-yocto.jinja2 index 05f0b6a1..9367ca93 100644 --- a/build-jobs-yocto.jinja2 +++ b/build-jobs-yocto.jinja2 @@ -61,11 +61,12 @@ build-{{ machine }}: CI_PARAM_MACHINE: {{ machine }} CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO }} CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE }} - INSTALLSCRIPTS: "fnginstall.sh" + 
INSTALLSCRIPT: "fng-install.sh" artifacts: paths: - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/images/{{ machine }}/* - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest + - build.env # Build jobs for the sdk buildsdk-{{ machine }}: @@ -78,6 +79,7 @@ buildsdk-{{ machine }}: artifacts: paths: - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/sdk/* + - build.env # Deploy jobs for the yocto image deployimage-{{ machine }}: @@ -124,11 +126,11 @@ uploadftp-sdk-{{ machine }}: smoketest:{{ machine }}: extends: - .test - - .test_override stage: test needs: - job: build-{{ machine }} variables: + CI_PARAM_BUILDJOB: build-{{ machine }} CI_PARAM_MACHINE: {{ machine }} CI_PARAM_PLATFORMS: {{ machine }} CI_PARAM_TEST_SUITE: boot.jinja2 @@ -137,11 +139,11 @@ smoketest:{{ machine }}: platformtest:{{ machine }}: extends: - .test - - .test_override stage: test needs: - job: build-{{ machine }} variables: + CI_PARAM_BUILDJOB: build-{{ machine }} CI_PARAM_MACHINE: {{ machine }} CI_PARAM_PLATFORMS: {{ platforms }} {% endif %} @@ -152,17 +154,18 @@ platformtest:{{ machine }}: # Build jobs for the fng system image build-{{ machine }}-fngsystem: - extends: .buildfng + extends: .buildimage stage: build variables: CI_PARAM_MACHINE: {{ machine }} CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO_FNG }} CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE_FNG }} - INSTALLSCRIPTS: "fngsystem-self-update.sh" + INSTALLSCRIPT: "fngsystem-self-update.sh" artifacts: paths: - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/images/{{ machine }}/* - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest + - build.env # Deploy jobs for the fngsystem image deployimage-{{ machine }}-fngsystem: diff --git a/build-yocto.yml b/build-yocto.yml index 21acf89d..2a8a3931 100644 --- a/build-yocto.yml +++ b/build-yocto.yml @@ -1,10 +1,4 @@ --- -variables: - BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}" - IMAGEBASEPATH: "tmp/deploy/images/" - IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}" - LICENSESPATH: "tmp/deploy/licenses" - SDKPATH: "tmp/deploy/sdk/" .collect_srcrevs: &collect_srcrevs # write all package AUTOREVS to file @@ -19,21 +13,33 @@ variables: .dump_install_command: &dump_install_command # print install instructions - |- - for i in ${INSTALLSCRIPTS};do - SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${i}" - if [[ -f "${SCRIPT}" ]]; then - cat <<-EOF + SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}" + if [ ! 
-f "${SCRIPT}" ]; then + echo "Install script missing, searched for '$SCRIPT'" + exit 1 + fi + + if [ "$CI_PROJECT_VISIBILITY" = "public" ];then + cat <<-EOF + ============================== + Install the image: + + FNG="$FNG_INSTALL_URL" + curl --location "\$FNG" | sh -s -- --url="\$(dirname "\$FNG")" + ============================== + EOF + else + cat <<-EOF ============================== Install the image: export GITLAB_TOKEN=<your_access_token> - FNG="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/${BUILDPATH}/${IMAGEPATH}/${i}" + FNG="$FNG_INSTALL_URL" curl --location --header "PRIVATE-TOKEN: \$GITLAB_TOKEN" "\$FNG" \ | sh -s -- --url="\$(dirname "\$FNG")" ============================== EOF fi - done .build_script: &build_script # setup build environment @@ -42,6 +48,11 @@ variables: - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env + - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env + - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env + - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env + - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env + - source build.env - echo "${LOGPREFIX} Using build dir ${BUILDPATH}" - export MACHINE="${CI_PARAM_MACHINE}" - export DISTRO="${CI_PARAM_DISTRO}" @@ -53,12 +64,22 @@ variables: - bitbake "${CI_PARAM_IMAGE}" -c "${BITBAKE_TASK}" - echo -e "section_end:`date +%s`:bitbake_run\r\e[0K" -.build: +.buildimage: + extends: + - .buildbase + needs: [] variables: GIT_STRATEGY: none SETUPSCRIPT: "setup-environment" BITBAKE_TASK: "build" LOGPREFIX: "CI:build:" + BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}" + IMAGEBASEPATH: "tmp/deploy/images/" + IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}" + LICENSESPATH: "tmp/deploy/licenses" + SDKPATH: "tmp/deploy/sdk/" + JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/" + FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}" before_script: - !reference [.docker_check] - !reference [.setup_ssh] @@ -74,19 +95,6 @@ variables: # -------------------------------------------------------------------------------------- # Stage: build # -------------------------------------------------------------------------------------- -.buildimage: - extends: - - .buildbase - - .build - needs: [] - -.buildfng: - extends: - - .buildimage - variables: - CI_PARAM_IMAGE: ${CI_PARAM_IMAGE_FNG} - CI_PARAM_DISTRO: ${CI_PARAM_DISTRO_FNG} - .buildsdk: extends: - .buildimage @@ -97,24 +105,6 @@ variables: variables: BITBAKE_TASK: "populate_sdk" -# -------------------------------------------------------------------------------------- -# Stage: test -# -------------------------------------------------------------------------------------- -.test_override: - before_script: - - !reference [.setup_ssh] - # Copy artifacts to local server for automated tests - # This is necessary because the LAVA devices cannot install directly from GitLab - # due to missing HTTPS support in FNGSystem. 
- - |- - echo "${LOGPREFIX} Copy files for automated tests using package_release" - .gitlab-ci/scripts/package_release.py \ - --images-dir="${BUILDPATH}/${IMAGEPATH}" \ - --outputdir-local="${DEPLOYPATH_TEST}" - after_script: - # Remove artifacts after testing - - rm -r "${DEPLOYPATH_TEST}" - # -------------------------------------------------------------------------------------- # Stage: deploy # -------------------------------------------------------------------------------------- diff --git a/manifest-package.yml b/manifest-package.yml index fc9df429..c91986b7 100644 --- a/manifest-package.yml +++ b/manifest-package.yml @@ -8,24 +8,17 @@ # Package release files # TODO: At the moment this script is used for # - image deployment, - # - sdk build, + # - sdk deployment, # - image and sdk ftp upload # - and artifact deployment for automated tests. # Some usecases require individual processing, which leads to # high script complexity. That should be improved. echo "${LOGPREFIX} Assemble build variables" - # We need to source the build.env file which is generated by the - # previous build step. This is a workaround to get the environment - # variables in the after_script step. - [ -e build.env ] && source build.env - BUILDPATH="build-${BUILD_DISTRO}-${BUILD_MACHINE}" - [ -z "${ARTIFACTS_IMAGE_PATH}" ] && \ - ARTIFACTS_IMAGE_PATH="${BUILDPATH}/${IMAGEBASEPATH}/${BUILD_MACHINE}" - [ -z "${ARTIFACTS_LICENSES_PATH}" ] && \ - ARTIFACTS_LICENSES_PATH="${BUILDPATH}/${LICENSESPATH}" - [ -z "${ARTIFACTS_SDK_PATH}" ] && \ - ARTIFACTS_SDK_PATH="${BUILDPATH}/${SDKPATH}" + # Variables set by build.env from the build artifacts + ARTIFACTS_IMAGE_PATH="$BUILD_PATH_IMAGE" + ARTIFACTS_LICENSES_PATH="$BUILD_PATH_LICENSE" + ARTIFACTS_SDK_PATH="$BUILD_PATH_SDK" if ${CI_PARAM_PACKAGE_FTP}; then UPLOAD_PARAM="" # don't store as gitlab artifact @@ -37,23 +30,22 @@ # If we are on the master branch and a tag is set # we tread it as release - if [ -n "${CI_COMMIT_TAG}" ];then + if [ -n "$CI_COMMIT_TAG" ];then outdir="${OUTDIR_BASE}-yocto/Releases" - [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \ + [ "$BUILD_DISTRO" = "guf-fngsystem" ] && \ outdir="${OUTDIR_BASE}-fngsystem" else outdir="${OUTDIR_BASE}-yocto/Interne_Releases" - [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \ + [ "$BUILD_DISTRO" = "guf-fngsystem" ] && \ outdir="${OUTDIR_BASE}-fngsystem/CI_Builds" fi # Print vars for debugging purposes - echo "${LOGPREFIX} BUILDPATH=${BUILDPATH}" - echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=${ARTIFACTS_IMAGE_PATH}" - echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=${ARTIFACTS_LICENSES_PATH}" - echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=${ARTIFACTS_SDK_PATH}" - echo "${LOGPREFIX} UPLOAD_PARAM=${UPLOAD_PARAM}" - echo "${LOGPREFIX} outdir=${outdir}" + echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=$ARTIFACTS_IMAGE_PATH" + echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=$ARTIFACTS_LICENSES_PATH" + echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=$ARTIFACTS_SDK_PATH" + echo "${LOGPREFIX} UPLOAD_PARAM=$UPLOAD_PARAM" + echo "${LOGPREFIX} outdir=$outdir" # Check if the package_release script is available script=".gitlab-ci/scripts/package_release.py" diff --git a/scripts/download_file_from_latest_job.py b/scripts/download_file_from_latest_job.py new file mode 100755 index 00000000..968586a8 --- /dev/null +++ b/scripts/download_file_from_latest_job.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python3 +""" + +Downloads given file of the artifacts of a pipeline job. 
+ +""" + +import argparse +import logging +import os +import sys + +import gitlab as gl + +__author__ = "Jonas Höppner" +__email__ = "jonas.hoeppner@garz-fricke.com" + +from download_job_artifacts import download_job_artifact +from get_pipeline_jobs import get_pipeline_jobs + +GITLAB_SERVER = "https://git.seco.com" + +verbose = 0 + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]") + + parser.add_argument( + "--gitlab-url", + help="""URL to the GitLab instance""", + dest="gitlab_url", + action="store", + default=GITLAB_SERVER, + ) + parser.add_argument( + "--token", + help="""GitLab REST API private access token""", + dest="token", + required=True, + ) + parser.add_argument( + "--project", + action="store", + dest="project", + help="Specify the project by either by id or by path.", + required=True, + ) + parser.add_argument( + "--pipeline", + action="store", + dest="pipeline", + help="Specify the pipeline by id.", + ) + parser.add_argument( + "-s", + "--stage", + action="store", + default=None, + help="Filter the jobs by the given stage, if omnitted all jobs are returned.", + ) + parser.add_argument( + "-n", + "--name", + action="store", + default=None, + help="Filter the jobs by given name, if omnitted all jobs are returned.", + ) + parser.add_argument( + "--path", + action="store", + default=None, + help="Path inside the artifacts, if set only one single file is downloaded instead of the complete artifacts.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + dest="verbose", + default=0, + help="Increase verbosity.", + ) + + options = parser.parse_args(args) + if options.verbose: + logging.basicConfig(level=logging.DEBUG) + + logging.debug(options) + gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token) + jobs = get_pipeline_jobs( + gitlab, options.project, options.pipeline, options.name, options.stage + ) + + def sort_by_finish_ts(j): + return j.finished_at + + jobs.sort(key=sort_by_finish_ts) + job = jobs[0] + + filename = download_job_artifact( + gitlab, dest=os.path.basename(options.path), path=options.path, job=job + ) + print("Downloaded {} for job {} to {}".format(options.path, job.name, filename)) + + +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/scripts/download_job_artifacts.py b/scripts/download_job_artifacts.py new file mode 100755 index 00000000..07717b1e --- /dev/null +++ b/scripts/download_job_artifacts.py @@ -0,0 +1,164 @@ +#!/usr/bin/env python3 +""" + +Downloads the job artifacts of a given job + +""" + +import argparse +import logging +import sys +import os +import zipfile +import tempfile +import gitlab as gl + +__author__ = "Jonas Höppner" +__email__ = "jonas.hoeppner@garz-fricke.com" + +GITLAB_SERVER = "https://git.seco.com" + +verbose = 0 + + +def download_job_artifacts(gitlab, dest, job, project=None, extract=False): + """Downloads the artifacts and stores them ar dest/job_id + If extract is set, the downloaded zipfile is extracted and removed. + Returns either the dest path or the zipfile name. 
+ """ + # Accept either a gitlab job object, or the project and the job id + if isinstance(job, gl.v4.objects.jobs.ProjectJob): + gl_job = job + else: + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + gl_job = gl_project.jobs.get(job) + + dest = os.path.join(dest, str(gl_job.id)) + os.makedirs(dest) + file_name = os.path.join(dest, "artifacts.zip") + with open(file_name, "wb") as f: + gl_job.artifacts(streamed=True, action=f.write) + if not extract: + return file_name + + zipfile.ZipFile(file_name).extractall(dest) + os.remove(file_name) + return dest + + +def download_job_artifact(gitlab, dest, path, job, project=None): + """Downloads one file from artifacts and stores it at dest""" + # Accept either a gitlab job object, or the project and the job id + if isinstance(job, gl.v4.objects.jobs.ProjectJob): + gl_job = job + else: + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + gl_job = gl_project.jobs.get(job) + + with open(dest, "wb") as f: + gl_job.artifact(path=path, streamed=True, action=f.write) + + return dest + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]") + + parser.add_argument( + "--gitlab-url", + help="""URL to the GitLab instance""", + dest="gitlab_url", + action="store", + default=GITLAB_SERVER, + ) + parser.add_argument( + "--token", + help="""GitLab REST API private access token""", + dest="token", + required=True, + ) + parser.add_argument( + "--project", + action="store", + dest="project", + help="Specify the project by either by id or by path.", + required=True, + ) + parser.add_argument( + "--job", + action="store", + dest="job", + help="Specify the job by id.", + required=True, + ) + parser.add_argument( + "--extract", + action="store_true", + dest="job", + default=False, + help="Specify if the artifacts should be extracted after download.", + ) + parser.add_argument( + "--dest", + "--destination", + action="store", + dest="destination", + default=None, + help="Folder where the artifacts are stored in, a local tmpfolder is generated if omnitted.", + ) + parser.add_argument( + "--path", + action="store", + default=None, + help="Path inside the artifacts, if set only one single file is downloaded instead of the complete artifacts.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + dest="verbose", + default=0, + help="Increase verbosity.", + ) + + options = parser.parse_args(args) + if options.verbose: + logging.basicConfig(level=logging.DEBUG) + + logging.debug(options) + gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token) + + if options.path is None: + if options.destination is None: + destination = tempfile.mkstemp() + else: + destination = options.destination + + filename = download_job_artifacts( + gitlab, destination, options.job, options.project, extract=True + ) + print("Downloaded artifacts for job {} to {}".format(options.job, filename)) + else: + if options.destination is None: + destination = tempfile.mkdtemp() + else: + destination = options.destination + + filename = download_job_artifact( + gitlab, destination, options.path, options.job, options.project + ) + print( + "Downloaded {} for job {} to {}".format(options.path, options.job, filename) + ) + + +if 
__name__ == "__main__": + main(sys.argv[1:]) diff --git a/scripts/get_pipeline_jobs.py b/scripts/get_pipeline_jobs.py new file mode 100755 index 00000000..71f7f12e --- /dev/null +++ b/scripts/get_pipeline_jobs.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 +""" + +Queries the jobs of a given pipeline, filtered by stage + +""" + +import argparse +import logging +import sys +import gitlab as gl + +__author__ = "Jonas Höppner" +__email__ = "jonas.hoeppner@garz-fricke.com" + +GITLAB_SERVER = "https://git.seco.com" + +verbose = 0 + + +def get_pipeline_pipelinejobs(gitlab, project, pipeline, name=None, stage=None): + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + + gl_pipeline = gl_project.pipelines.get(pipeline) + jobs = gl_pipeline.jobs.list(all=True) + if stage is not None: + jobs = list(filter(lambda j: j.stage == stage, jobs)) + if name is not None: + jobs = list(filter(lambda j: j.name == name, jobs)) + + return jobs + + +def get_pipeline_jobs(gitlab, project, pipeline, name=None, stage=None): + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + + pipeline_jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage) + jobs = [] + # Project Jobs from Pipeline Job + for job in pipeline_jobs: + jobs.append(gl_project.jobs.get(job.id)) + return jobs + + +def get_pipeline_job_ids(gitlab, project, pipeline, name=None, stage=None): + jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage) + return [j.id for j in jobs] + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]") + + parser.add_argument( + "--gitlab-url", + help="""URL to the GitLab instance""", + dest="gitlab_url", + action="store", + default=GITLAB_SERVER, + ) + parser.add_argument( + "--token", + help="""GitLab REST API private access token""", + dest="token", + required=True, + ) + parser.add_argument( + "--project", + action="store", + dest="project", + help="Specify the project by either by id or by path.", + required=True, + ) + parser.add_argument( + "--pipeline", + action="store", + dest="pipeline", + help="Specify the pipeline by id.", + ) + parser.add_argument( + "-s", + "--stage", + action="store", + default=None, + help="Filter the jobs by the given stage, if omnitted all jobs are returned.", + ) + parser.add_argument( + "-n", + "--name", + action="store", + default=None, + help="Filter the jobs by given name, if omnitted all jobs are returned.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + dest="verbose", + default=0, + help="Increase verbosity.", + ) + + options = parser.parse_args(args) + if options.verbose: + logging.basicConfig(level=logging.DEBUG) + + logging.debug(options) + gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token) + jobs = get_pipeline_jobs( + gitlab, options.project, options.pipeline, options.name, options.stage + ) + + for j in jobs: + print(j.name, j.id) + + job_ids = get_pipeline_job_ids( + gitlab, options.project, options.pipeline, options.name, options.stage + ) + for j in job_ids: + print(j) + + +if __name__ == "__main__": + main(sys.argv[1:]) -- GitLab