diff --git a/build-common.yml b/build-common.yml
index 4c49833f002e207b92a610ed1892be7d4af694ab..16839bbb45f4785f90849ff74495f373fc8160f4 100644
--- a/build-common.yml
+++ b/build-common.yml
@@ -74,13 +74,13 @@ workflow:
   rules:
     - when: manual
       allow_failure: true
+  dependencies: []
   variables:
-    LOGPREFIX: "CI:test:"
+    # TODO checkout only gitlab-ci repo to allow running on deleted branches
     # Include git submodules
     GIT_SUBMODULE_STRATEGY: recursive
     CI_PARAM_TEST_SUITE: '{platform}.jinja2'
     CI_PARAM_EXTRA: --nop
-    DEPLOYPATH_TEST: /artifacts/${CI_JOB_ID}/
     GITLAB_SERVER: "${CI_SERVER_HOST}:${CI_SERVER_SSH_PORT}"
     GIT_BASE_URL: "ssh://git@${GITLAB_SERVER}/${CI_PROJECT_ROOT_NAMESPACE}"
     TESTS_GIT_URL: "${GIT_BASE_URL}/yocto/tests.git"
@@ -94,18 +94,22 @@ workflow:
   before_script:
     - !reference [.setup_ssh]
   script:
+    - .gitlab-ci/scripts/download_file_from_latest_job.py
+      --project $CI_PROJECT_ID
+      --pipeline $CI_PIPELINE_ID
+      --token=$GITBOT_TOKEN
+      --name $CI_PARAM_BUILDJOB
+      --path build.env
+    - cat build.env
+    - source build.env
     # Submit tests to lava server
-    - |-
-      RELEASE=${RELEASE:-$(ls ${DEPLOYPATH_TEST}/)}
-      INSTALLSCRIPT_ABS="$DEPLOYPATH_TEST/$RELEASE/$CI_PARAM_MACHINE/fng-install.sh"
-      FNG_INSTALL_URL="${ARTIFACTS_HOST_URL}/${INSTALLSCRIPT_ABS#/*/}"
-      .gitlab-ci/scripts/submit_test.py \
-        --fng-install "${FNG_INSTALL_URL}" \
-        --name \
-        "Gitlab build test ${CI_PARAM_MACHINE} ${RELEASE} ${CI_PIPELINE_ID}" \
-        --results-path "results" \
-        --test-repo ${TESTS_GIT_URL} \
-        --test-repo-branch ${TEST_REPO_BRANCH} \
-        --test-plan ${CI_PARAM_TEST_SUITE} \
-        ${CI_PARAM_EXTRA} \
-        ${CI_PARAM_PLATFORMS}
+    - .gitlab-ci/scripts/submit_test.py
+      --fng-install "$FNG_INSTALL_URL"
+      --name
+      "Gitlab $BUILD_MACHINE $BUILD_DISTRO $BUILD_IMAGE ($CI_PIPELINE_ID)"
+      --results-path "results"
+      --test-repo $TESTS_GIT_URL
+      --test-repo-branch $TEST_REPO_BRANCH
+      --test-plan $CI_PARAM_TEST_SUITE
+      $CI_PARAM_EXTRA
+      $CI_PARAM_PLATFORMS
diff --git a/build-jobs-ci-test.jinja2 b/build-jobs-ci-test.jinja2
index cfad8a92c0de4e21536b5d2d7217d8a468d11f23..3132ff50de0b58287198a9a9049d87ff8533db1a 100644
--- a/build-jobs-ci-test.jinja2
+++ b/build-jobs-ci-test.jinja2
@@ -61,6 +61,53 @@ build:echo:
     - printenv
     - echo "Build successful"
 
+build-imx6guf-fake:
+  stage: build
+  needs: []
+  tags:
+    - infrastructure
+  timeout: 20m
+  image: ${CI_IMAGE_PYTHON}
+  rules:
+    - when: manual
+      allow_failure: true
+  variables:
+    MANIFEST_VERSION: Yocto-dunfell-17.0
+    CI_PARAM_MACHINE: imx6guf
+    CI_PARAM_DISTRO: guf-wayland
+    CI_PARAM_IMAGE: guf-image
+    BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
+    IMAGEBASEPATH: "tmp/deploy/images/"
+    IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
+    LICENSESPATH: "tmp/deploy/licenses"
+    SDKPATH: "tmp/deploy/sdk/"
+    INSTALLSCRIPT: "fng-install.sh"
+    JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/"
+    FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
+
+  script:
+    # TODO: this becomes similar to the yocto build and may be merged
+    - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env
+    - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env
+    - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env
+    - echo "MANIFEST_VERSION=$MANIFEST_VERSION" >> build.env
+    - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env
+    - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env
+    - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env
+    - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env
+    - source build.env
+
+    - echo "Getting yocto build from Z:"
+    - mkdir -p $BUILD_PATH_IMAGE
+    - cp -v /artifacts-yocto/Releases/$MANIFEST_VERSION/$CI_PARAM_MACHINE/*.* $BUILD_PATH_IMAGE
+    - ls $BUILD_PATH_IMAGE
+  artifacts:
+    reports:
+      dotenv: build.env
+    paths:
+      - build-guf-wayland-imx6guf/tmp/deploy/images/imx6guf/*
+      - build.env
+
 # --------------------------------------------------------------------------------------
 # Stage: test
 # --------------------------------------------------------------------------------------
@@ -87,16 +134,15 @@ test:fail:
   variables:
     RETURNCODE: 1
 
-smoketest:imx6guf:
-  extends: .test
+smoketest-imx6guf:
+  extends:
+    - .test
   stage: test
+  needs: [build-imx6guf-fake]
   variables:
-    CI_PARAM_MACHINE: imx6guf
+    TEST_REPO_BRANCH: dunfell
     CI_PARAM_PLATFORMS: imx6guf
-    CI_PARAM_TEST_SUITE: boot.jinja2
     CI_PARAM_EXTRA: --all-devices
-    # Use existing hard-coded release as we don't build in CI-test
-    ARTIFACTS_HOST_URL: http://srv73
-    DEPLOYPATH_TEST: SoftwareStore/Linux-Yocto/Releases/
-    RELEASE: Yocto-dunfell-15.0
-    TEST_REPO_BRANCH: dunfell
+    CI_PARAM_TEST_SUITE: boot.jinja2
+    CI_PARAM_BUILDJOB: build-imx6guf-fake
+
diff --git a/build-jobs-yocto.jinja2 b/build-jobs-yocto.jinja2
index 05f0b6a1469a5a97b3c1824fb8481657eab6cfc1..9367ca933cdf474a28b80f29f1e651cc8454d9b8 100644
--- a/build-jobs-yocto.jinja2
+++ b/build-jobs-yocto.jinja2
@@ -61,11 +61,12 @@ build-{{ machine }}:
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO }}
     CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE }}
-    INSTALLSCRIPTS: "fnginstall.sh"
+    INSTALLSCRIPT: "fng-install.sh"
   artifacts:
     paths:
       - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/images/{{ machine }}/*
       - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest
+      - build.env
 
 # Build jobs for the sdk
 buildsdk-{{ machine }}:
@@ -78,6 +79,7 @@ buildsdk-{{ machine }}:
   artifacts:
     paths:
       - build-{{ CI_PARAM_DISTRO }}-{{ machine }}/tmp/deploy/sdk/*
+      - build.env
 
 # Deploy jobs for the yocto image
 deployimage-{{ machine }}:
@@ -124,11 +126,11 @@ uploadftp-sdk-{{ machine }}:
 smoketest:{{ machine }}:
   extends:
     - .test
-    - .test_override
   stage: test
   needs:
     - job: build-{{ machine }}
   variables:
+    CI_PARAM_BUILDJOB: build-{{ machine }}
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_PLATFORMS: {{ machine }}
     CI_PARAM_TEST_SUITE: boot.jinja2
@@ -137,11 +139,11 @@ smoketest:{{ machine }}:
 platformtest:{{ machine }}:
   extends:
     - .test
-    - .test_override
   stage: test
   needs:
     - job: build-{{ machine }}
   variables:
+    CI_PARAM_BUILDJOB: build-{{ machine }}
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_PLATFORMS: {{ platforms }}
 {% endif %}
@@ -152,17 +154,18 @@ platformtest:{{ machine }}:
 
 # Build jobs for the fng system image
 build-{{ machine }}-fngsystem:
-  extends: .buildfng
+  extends: .buildimage
   stage: build
   variables:
     CI_PARAM_MACHINE: {{ machine }}
     CI_PARAM_DISTRO: {{ CI_PARAM_DISTRO_FNG }}
     CI_PARAM_IMAGE: {{ CI_PARAM_IMAGE_FNG }}
-    INSTALLSCRIPTS: "fngsystem-self-update.sh"
+    INSTALLSCRIPT: "fngsystem-self-update.sh"
   artifacts:
     paths:
       - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/images/{{ machine }}/*
       - build-{{ CI_PARAM_DISTRO_FNG }}-{{ machine }}/tmp/deploy/licenses/**/license.manifest
+      - build.env
 
 # Deploy jobs for the fngsystem image
 deployimage-{{ machine }}-fngsystem:
diff --git a/build-yocto.yml b/build-yocto.yml
index 21acf89df87a1326d4c2ca1dfba708fa4b4fafd1..2a8a39318f5747b2e811d427df0d2e196c381ef6 100644
--- a/build-yocto.yml
+++ b/build-yocto.yml
@@ -1,10 +1,4 @@
 ---
-variables:
-  BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
-  IMAGEBASEPATH: "tmp/deploy/images/"
-  IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
-  LICENSESPATH: "tmp/deploy/licenses"
-  SDKPATH: "tmp/deploy/sdk/"
 
 .collect_srcrevs: &collect_srcrevs
   # write all package AUTOREVS to file
@@ -19,21 +13,33 @@ variables:
 .dump_install_command: &dump_install_command
   # print install instructions
   - |-
-    for i in ${INSTALLSCRIPTS};do
-      SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${i}"
-      if [[ -f "${SCRIPT}" ]]; then
-        cat <<-EOF
+    SCRIPT="${CI_PROJECT_DIR}/${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
+    if [ ! -f "${SCRIPT}" ]; then
+      echo "Install script missing, searched for '$SCRIPT'"
+      exit 1
+    fi
+
+    if [ "$CI_PROJECT_VISIBILITY" = "public" ];then
+      cat <<-EOF
+	==============================
+	Install the image:
+
+	FNG="$FNG_INSTALL_URL"
+	curl --location "\$FNG" | sh -s -- --url="\$(dirname "\$FNG")"
+	==============================
+	EOF
+    else
+      cat <<-EOF
 	==============================
 	Install the image:
 
 	export GITLAB_TOKEN=<your_access_token>
-	FNG="${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/${BUILDPATH}/${IMAGEPATH}/${i}"
+	FNG="$FNG_INSTALL_URL"
 	curl --location --header "PRIVATE-TOKEN: \$GITLAB_TOKEN" "\$FNG" \
 	| sh -s -- --url="\$(dirname "\$FNG")"
 	==============================
 	EOF
     fi
-    done
 
 .build_script: &build_script
   # setup build environment
@@ -42,6 +48,11 @@ variables:
   - echo "BUILD_MACHINE=$CI_PARAM_MACHINE" > build.env
   - echo "BUILD_IMAGE=$CI_PARAM_IMAGE" >> build.env
   - echo "BUILD_DISTRO=$CI_PARAM_DISTRO" >> build.env
+  - echo "BUILD_PATH_IMAGE=${BUILDPATH}/${IMAGEPATH}" >> build.env
+  - echo "BUILD_PATH_SDK=${BUILDPATH}/${SDKPATH}" >> build.env
+  - echo "BUILD_PATH_LICENSE=${BUILDPATH}/${LICENSESPATH}" >> build.env
+  - echo "FNG_INSTALL_URL=${JOB_URL}${FNG_INSTALL_PATH}" >> build.env
+  - source build.env
   - echo "${LOGPREFIX} Using build dir ${BUILDPATH}"
   - export MACHINE="${CI_PARAM_MACHINE}"
  - export DISTRO="${CI_PARAM_DISTRO}"
@@ -53,12 +64,22 @@
   - bitbake "${CI_PARAM_IMAGE}" -c "${BITBAKE_TASK}"
   - echo -e "section_end:`date +%s`:bitbake_run\r\e[0K"
 
-.build:
+.buildimage:
+  extends:
+    - .buildbase
+  needs: []
   variables:
     GIT_STRATEGY: none
     SETUPSCRIPT: "setup-environment"
     BITBAKE_TASK: "build"
     LOGPREFIX: "CI:build:"
+    BUILDPATH: "build-${CI_PARAM_DISTRO}-${CI_PARAM_MACHINE}"
+    IMAGEBASEPATH: "tmp/deploy/images/"
+    IMAGEPATH: "${IMAGEBASEPATH}/${CI_PARAM_MACHINE}"
+    LICENSESPATH: "tmp/deploy/licenses"
+    SDKPATH: "tmp/deploy/sdk/"
+    JOB_URL: "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/${CI_JOB_ID}/artifacts/"
+    FNG_INSTALL_PATH: "${BUILDPATH}/${IMAGEPATH}/${INSTALLSCRIPT}"
   before_script:
     - !reference [.docker_check]
     - !reference [.setup_ssh]
@@ -74,19 +95,6 @@ variables:
 # --------------------------------------------------------------------------------------
 # Stage: build
 # --------------------------------------------------------------------------------------
-.buildimage:
-  extends:
-    - .buildbase
-    - .build
-  needs: []
-
-.buildfng:
-  extends:
-    - .buildimage
-  variables:
-    CI_PARAM_IMAGE: ${CI_PARAM_IMAGE_FNG}
-    CI_PARAM_DISTRO: ${CI_PARAM_DISTRO_FNG}
-
 .buildsdk:
   extends:
     - .buildimage
@@ -97,24 +105,6 @@
   variables:
     BITBAKE_TASK: "populate_sdk"
 
-# --------------------------------------------------------------------------------------
-# Stage: test
-# --------------------------------------------------------------------------------------
-.test_override:
-  before_script:
-    - !reference [.setup_ssh]
-    # Copy artifacts to local server for automated tests
-    # This is necessary because the LAVA devices cannot install directly from GitLab
-    # due to missing HTTPS support in FNGSystem.
-    - |-
-      echo "${LOGPREFIX} Copy files for automated tests using package_release"
-      .gitlab-ci/scripts/package_release.py \
-        --images-dir="${BUILDPATH}/${IMAGEPATH}" \
-        --outputdir-local="${DEPLOYPATH_TEST}"
-  after_script:
-    # Remove artifacts after testing
-    - rm -r "${DEPLOYPATH_TEST}"
-
 # --------------------------------------------------------------------------------------
 # Stage: deploy
 # --------------------------------------------------------------------------------------
diff --git a/manifest-package.yml b/manifest-package.yml
index fc9df42974d23c39861de791fec87c76dff78ab2..c91986b791c28f6ea8f42d541eab48384ba99545 100644
--- a/manifest-package.yml
+++ b/manifest-package.yml
@@ -8,24 +8,17 @@
     # Package release files
     # TODO: At the moment this script is used for
     # - image deployment,
-    # - sdk build,
+    # - sdk deployment,
     # - image and sdk ftp upload
     # - and artifact deployment for automated tests.
     # Some usecases require individual processing, which leads to
     # high script complexity. That should be improved.
 
     echo "${LOGPREFIX} Assemble build variables"
-    # We need to source the build.env file which is generated by the
-    # previous build step. This is a workaround to get the environment
-    # variables in the after_script step.
-    [ -e build.env ] && source build.env
-    BUILDPATH="build-${BUILD_DISTRO}-${BUILD_MACHINE}"
-    [ -z "${ARTIFACTS_IMAGE_PATH}" ] && \
-      ARTIFACTS_IMAGE_PATH="${BUILDPATH}/${IMAGEBASEPATH}/${BUILD_MACHINE}"
-    [ -z "${ARTIFACTS_LICENSES_PATH}" ] && \
-      ARTIFACTS_LICENSES_PATH="${BUILDPATH}/${LICENSESPATH}"
-    [ -z "${ARTIFACTS_SDK_PATH}" ] && \
-      ARTIFACTS_SDK_PATH="${BUILDPATH}/${SDKPATH}"
+    # Variables set by build.env from the build artifacts
+    ARTIFACTS_IMAGE_PATH="$BUILD_PATH_IMAGE"
+    ARTIFACTS_LICENSES_PATH="$BUILD_PATH_LICENSE"
+    ARTIFACTS_SDK_PATH="$BUILD_PATH_SDK"
 
     if ${CI_PARAM_PACKAGE_FTP}; then
       UPLOAD_PARAM="" # don't store as gitlab artifact
@@ -37,23 +30,22 @@
 
     # If we are on the master branch and a tag is set
     # we tread it as release
-    if [ -n "${CI_COMMIT_TAG}" ];then
+    if [ -n "$CI_COMMIT_TAG" ];then
       outdir="${OUTDIR_BASE}-yocto/Releases"
-      [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \
+      [ "$BUILD_DISTRO" = "guf-fngsystem" ] && \
         outdir="${OUTDIR_BASE}-fngsystem"
     else
       outdir="${OUTDIR_BASE}-yocto/Interne_Releases"
-      [ "${BUILD_DISTRO}" = "guf-fngsystem" ] && \
+      [ "$BUILD_DISTRO" = "guf-fngsystem" ] && \
         outdir="${OUTDIR_BASE}-fngsystem/CI_Builds"
     fi
 
     # Print vars for debugging purposes
-    echo "${LOGPREFIX} BUILDPATH=${BUILDPATH}"
-    echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=${ARTIFACTS_IMAGE_PATH}"
-    echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=${ARTIFACTS_LICENSES_PATH}"
-    echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=${ARTIFACTS_SDK_PATH}"
-    echo "${LOGPREFIX} UPLOAD_PARAM=${UPLOAD_PARAM}"
-    echo "${LOGPREFIX} outdir=${outdir}"
+    echo "${LOGPREFIX} ARTIFACTS_IMAGE_PATH=$ARTIFACTS_IMAGE_PATH"
+    echo "${LOGPREFIX} ARTIFACTS_LICENSES_PATH=$ARTIFACTS_LICENSES_PATH"
+    echo "${LOGPREFIX} ARTIFACTS_SDK_PATH=$ARTIFACTS_SDK_PATH"
+    echo "${LOGPREFIX} UPLOAD_PARAM=$UPLOAD_PARAM"
+    echo "${LOGPREFIX} outdir=$outdir"
 
     # Check if the package_release script is available
     script=".gitlab-ci/scripts/package_release.py"
diff --git a/scripts/download_file_from_latest_job.py b/scripts/download_file_from_latest_job.py
new file mode 100755
index 0000000000000000000000000000000000000000..968586a8b0c6339b528cd8905d0c0801c5c15fc8
--- /dev/null
+++ b/scripts/download_file_from_latest_job.py
@@ -0,0 +1,107 @@
+#!/usr/bin/env python3
+"""
+
+Downloads a given file from the artifacts of a pipeline job.
+
+"""
+
+import argparse
+import logging
+import os
+import sys
+
+import gitlab as gl
+
+__author__ = "Jonas Höppner"
+__email__ = "jonas.hoeppner@garz-fricke.com"
+
+from download_job_artifacts import download_job_artifact
+from get_pipeline_jobs import get_pipeline_jobs
+
+GITLAB_SERVER = "https://git.seco.com"
+
+verbose = 0
+
+
+def main(args):
+    parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")
+
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        action="store",
+        default=GITLAB_SERVER,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        action="store",
+        dest="project",
+        help="Specify the project either by id or by path.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        action="store",
+        dest="pipeline",
+        help="Specify the pipeline by id.",
+    )
+    parser.add_argument(
+        "-s",
+        "--stage",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given stage, if omitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given name, if omitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "--path",
+        action="store",
+        default=None,
+        help="Path inside the artifacts; if set, only a single file is downloaded instead of the complete artifacts.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        dest="verbose",
+        default=0,
+        help="Increase verbosity.",
+    )
+
+    options = parser.parse_args(args)
+    if options.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+
+    logging.debug(options)
+    gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
+    jobs = get_pipeline_jobs(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+
+    def sort_by_finish_ts(j):
+        return j.finished_at
+
+    jobs.sort(key=sort_by_finish_ts, reverse=True)
+    job = jobs[0]  # the most recently finished matching job
+
+    filename = download_job_artifact(
+        gitlab, dest=os.path.basename(options.path), path=options.path, job=job
+    )
+    print("Downloaded {} for job {} to {}".format(options.path, job.name, filename))
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
+ """ + # Accept either a gitlab job object, or the project and the job id + if isinstance(job, gl.v4.objects.jobs.ProjectJob): + gl_job = job + else: + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + gl_job = gl_project.jobs.get(job) + + dest = os.path.join(dest, str(gl_job.id)) + os.makedirs(dest) + file_name = os.path.join(dest, "artifacts.zip") + with open(file_name, "wb") as f: + gl_job.artifacts(streamed=True, action=f.write) + if not extract: + return file_name + + zipfile.ZipFile(file_name).extractall(dest) + os.remove(file_name) + return dest + + +def download_job_artifact(gitlab, dest, path, job, project=None): + """Downloads one file from artifacts and stores it at dest""" + # Accept either a gitlab job object, or the project and the job id + if isinstance(job, gl.v4.objects.jobs.ProjectJob): + gl_job = job + else: + # Accept either an gitlab object, or name or id to specify a project + if isinstance(project, gl.v4.objects.projects.Project): + gl_project = project + else: + gl_project = gitlab.projects.get(project) + gl_job = gl_project.jobs.get(job) + + with open(dest, "wb") as f: + gl_job.artifact(path=path, streamed=True, action=f.write) + + return dest + + +def main(args): + parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]") + + parser.add_argument( + "--gitlab-url", + help="""URL to the GitLab instance""", + dest="gitlab_url", + action="store", + default=GITLAB_SERVER, + ) + parser.add_argument( + "--token", + help="""GitLab REST API private access token""", + dest="token", + required=True, + ) + parser.add_argument( + "--project", + action="store", + dest="project", + help="Specify the project by either by id or by path.", + required=True, + ) + parser.add_argument( + "--job", + action="store", + dest="job", + help="Specify the job by id.", + required=True, + ) + parser.add_argument( + "--extract", + action="store_true", + dest="job", + default=False, + help="Specify if the artifacts should be extracted after download.", + ) + parser.add_argument( + "--dest", + "--destination", + action="store", + dest="destination", + default=None, + help="Folder where the artifacts are stored in, a local tmpfolder is generated if omnitted.", + ) + parser.add_argument( + "--path", + action="store", + default=None, + help="Path inside the artifacts, if set only one single file is downloaded instead of the complete artifacts.", + ) + parser.add_argument( + "-v", + "--verbose", + action="count", + dest="verbose", + default=0, + help="Increase verbosity.", + ) + + options = parser.parse_args(args) + if options.verbose: + logging.basicConfig(level=logging.DEBUG) + + logging.debug(options) + gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token) + + if options.path is None: + if options.destination is None: + destination = tempfile.mkstemp() + else: + destination = options.destination + + filename = download_job_artifacts( + gitlab, destination, options.job, options.project, extract=True + ) + print("Downloaded artifacts for job {} to {}".format(options.job, filename)) + else: + if options.destination is None: + destination = tempfile.mkdtemp() + else: + destination = options.destination + + filename = download_job_artifact( + gitlab, destination, options.path, options.job, options.project + ) + print( + "Downloaded {} for job {} to {}".format(options.path, options.job, filename) + ) + + +if 
diff --git a/scripts/get_pipeline_jobs.py b/scripts/get_pipeline_jobs.py
new file mode 100755
index 0000000000000000000000000000000000000000..71f7f12ef9d250fd235a1c457248df16d2d4f759
--- /dev/null
+++ b/scripts/get_pipeline_jobs.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python3
+"""
+
+Queries the jobs of a given pipeline, filtered by stage
+
+"""
+
+import argparse
+import logging
+import sys
+import gitlab as gl
+
+__author__ = "Jonas Höppner"
+__email__ = "jonas.hoeppner@garz-fricke.com"
+
+GITLAB_SERVER = "https://git.seco.com"
+
+verbose = 0
+
+
+def get_pipeline_pipelinejobs(gitlab, project, pipeline, name=None, stage=None):
+    # Accept either a gitlab object, or name or id to specify a project
+    if isinstance(project, gl.v4.objects.projects.Project):
+        gl_project = project
+    else:
+        gl_project = gitlab.projects.get(project)
+
+    gl_pipeline = gl_project.pipelines.get(pipeline)
+    jobs = gl_pipeline.jobs.list(all=True)
+    if stage is not None:
+        jobs = list(filter(lambda j: j.stage == stage, jobs))
+    if name is not None:
+        jobs = list(filter(lambda j: j.name == name, jobs))
+
+    return jobs
+
+
+def get_pipeline_jobs(gitlab, project, pipeline, name=None, stage=None):
+    # Accept either a gitlab object, or name or id to specify a project
+    if isinstance(project, gl.v4.objects.projects.Project):
+        gl_project = project
+    else:
+        gl_project = gitlab.projects.get(project)
+
+    pipeline_jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage)
+    jobs = []
+    # Resolve project jobs from the pipeline jobs
+    for job in pipeline_jobs:
+        jobs.append(gl_project.jobs.get(job.id))
+    return jobs
+
+
+def get_pipeline_job_ids(gitlab, project, pipeline, name=None, stage=None):
+    jobs = get_pipeline_pipelinejobs(gitlab, project, pipeline, name, stage)
+    return [j.id for j in jobs]
+
+
+def main(args):
+    parser = argparse.ArgumentParser(description=__doc__, usage="%(prog)s [OPTIONS]")
+
+    parser.add_argument(
+        "--gitlab-url",
+        help="""URL to the GitLab instance""",
+        dest="gitlab_url",
+        action="store",
+        default=GITLAB_SERVER,
+    )
+    parser.add_argument(
+        "--token",
+        help="""GitLab REST API private access token""",
+        dest="token",
+        required=True,
+    )
+    parser.add_argument(
+        "--project",
+        action="store",
+        dest="project",
+        help="Specify the project either by id or by path.",
+        required=True,
+    )
+    parser.add_argument(
+        "--pipeline",
+        action="store",
+        dest="pipeline",
+        help="Specify the pipeline by id.",
+    )
+    parser.add_argument(
+        "-s",
+        "--stage",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given stage, if omitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-n",
+        "--name",
+        action="store",
+        default=None,
+        help="Filter the jobs by the given name, if omitted all jobs are returned.",
+    )
+    parser.add_argument(
+        "-v",
+        "--verbose",
+        action="count",
+        dest="verbose",
+        default=0,
+        help="Increase verbosity.",
+    )
+
+    options = parser.parse_args(args)
+    if options.verbose:
+        logging.basicConfig(level=logging.DEBUG)
+
+    logging.debug(options)
+    gitlab = gl.Gitlab(options.gitlab_url, private_token=options.token)
+    jobs = get_pipeline_jobs(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+
+    for j in jobs:
+        print(j.name, j.id)
+
+    job_ids = get_pipeline_job_ids(
+        gitlab, options.project, options.pipeline, options.name, options.stage
+    )
+    for j in job_ids:
+        print(j)
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])